Dec 11 10:17:48 crc systemd[1]: Starting Kubernetes Kubelet...
Dec 11 10:17:48 crc restorecon[4571]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 11 10:17:48 crc restorecon[4571]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 11 10:17:48 crc 
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 10:17:48 crc restorecon[4571]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to
system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 
10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 10:17:48 crc 
restorecon[4571]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 10:17:48 crc restorecon[4571]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Dec 11 10:17:48 crc restorecon[4571]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 11 10:17:48 crc restorecon[4571]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 10:17:48 crc restorecon[4571]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 
10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:48 crc restorecon[4571]:
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:48 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 10:17:49 crc restorecon[4571]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 11 10:17:49 crc restorecon[4571]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 11 10:17:49 crc restorecon[4571]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Dec 11 10:17:49 crc kubenswrapper[4663]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 11 10:17:49 crc kubenswrapper[4663]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Dec 11 10:17:49 crc kubenswrapper[4663]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 11 10:17:49 crc kubenswrapper[4663]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Dec 11 10:17:49 crc kubenswrapper[4663]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI.
Dec 11 10:17:49 crc kubenswrapper[4663]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.369341 4663 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime"
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372318 4663 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372338 4663 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372343 4663 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372348 4663 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372352 4663 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372358 4663 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372363 4663 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372368 4663 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372371 4663 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372375 4663 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372378 4663 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372382 4663 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372385 4663 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372389 4663 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372392 4663 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372395 4663 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372399 4663 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372402 4663 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372405 4663 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372408 4663 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372412 4663 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372415 4663 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372418 4663 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372421 4663 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372424 4663 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372434 4663 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372438 4663 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372442 4663 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372446 4663 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372451 4663 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372454 4663 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372459 4663 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372463 4663 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372467 4663 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372472 4663 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372476 4663 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372480 4663 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372484 4663 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372488 4663 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372493 4663 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372497 4663 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372501 4663 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372505 4663 feature_gate.go:330] unrecognized feature gate: Example
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372509 4663 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372513 4663 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372518 4663 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372522 4663 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372526 4663 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372530 4663 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372533 4663 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372537 4663 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372540 4663 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372543 4663 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372546 4663 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372550 4663 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372553 4663 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372557 4663 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372561 4663 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372564 4663 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372567 4663 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372571 4663 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372574 4663 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372578 4663 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372581 4663 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372584 4663 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372588 4663 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372591 4663 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372594 4663 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372599 4663 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372603 4663 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.372607 4663 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373032 4663 flags.go:64] FLAG: --address="0.0.0.0"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373047 4663 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373057 4663 flags.go:64] FLAG: --anonymous-auth="true"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373063 4663 flags.go:64] FLAG: --application-metrics-count-limit="100"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373069 4663 flags.go:64] FLAG: --authentication-token-webhook="false"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373072 4663 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373078 4663 flags.go:64] FLAG: --authorization-mode="AlwaysAllow"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373084 4663 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373088 4663 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373092 4663 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373096 4663 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373101 4663 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373105 4663 flags.go:64] FLAG: --cgroup-driver="cgroupfs"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373108 4663 flags.go:64] FLAG: --cgroup-root=""
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373113 4663 flags.go:64] FLAG: --cgroups-per-qos="true"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373117 4663 flags.go:64] FLAG: --client-ca-file=""
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373120 4663 flags.go:64] FLAG: --cloud-config=""
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373124 4663 flags.go:64] FLAG: --cloud-provider=""
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373128 4663 flags.go:64] FLAG: --cluster-dns="[]"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373134 4663 flags.go:64] FLAG: --cluster-domain=""
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373138 4663 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373142 4663 flags.go:64] FLAG: --config-dir=""
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373145 4663 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373150 4663 flags.go:64] FLAG: --container-log-max-files="5"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373155 4663 flags.go:64] FLAG: --container-log-max-size="10Mi"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373159 4663 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373163 4663 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373167 4663 flags.go:64] FLAG: --containerd-namespace="k8s.io"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373172 4663 flags.go:64] FLAG: --contention-profiling="false"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373175 4663 flags.go:64] FLAG: --cpu-cfs-quota="true"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373193 4663 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373197 4663 flags.go:64] FLAG: --cpu-manager-policy="none"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373202 4663 flags.go:64] FLAG: --cpu-manager-policy-options=""
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373208 4663 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373212 4663 flags.go:64] FLAG: --enable-controller-attach-detach="true"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373216 4663 flags.go:64] FLAG: --enable-debugging-handlers="true"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373219 4663 flags.go:64] FLAG: --enable-load-reader="false"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373224 4663 flags.go:64] FLAG: --enable-server="true"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373228 4663 flags.go:64] FLAG: --enforce-node-allocatable="[pods]"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373233 4663 flags.go:64] FLAG: --event-burst="100"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373237 4663 flags.go:64] FLAG: --event-qps="50"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373241 4663 flags.go:64] FLAG: --event-storage-age-limit="default=0"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373245 4663 flags.go:64] FLAG: --event-storage-event-limit="default=0"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373249 4663 flags.go:64] FLAG: --eviction-hard=""
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373254 4663 flags.go:64] FLAG: --eviction-max-pod-grace-period="0"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373258 4663 flags.go:64] FLAG: --eviction-minimum-reclaim=""
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373261 4663 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373265 4663 flags.go:64] FLAG: --eviction-soft=""
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373269 4663 flags.go:64] FLAG: --eviction-soft-grace-period=""
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373273 4663 flags.go:64] FLAG: --exit-on-lock-contention="false"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373276 4663 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373280 4663 flags.go:64] FLAG: --experimental-mounter-path=""
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373283 4663 flags.go:64] FLAG: --fail-cgroupv1="false"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373287 4663 flags.go:64] FLAG: --fail-swap-on="true"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373291 4663 flags.go:64] FLAG: --feature-gates=""
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373296 4663 flags.go:64] FLAG: --file-check-frequency="20s"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373299 4663 flags.go:64] FLAG: --global-housekeeping-interval="1m0s"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373304 4663 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373308 4663 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373312 4663 flags.go:64] FLAG: --healthz-port="10248"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373316 4663 flags.go:64] FLAG: --help="false"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373320 4663 flags.go:64] FLAG: --hostname-override=""
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373323 4663 flags.go:64] FLAG: --housekeeping-interval="10s"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373328 4663 flags.go:64] FLAG: --http-check-frequency="20s"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373334 4663 flags.go:64] FLAG: --image-credential-provider-bin-dir=""
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373338 4663 flags.go:64] FLAG: --image-credential-provider-config=""
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373343 4663 flags.go:64] FLAG: --image-gc-high-threshold="85"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373347 4663 flags.go:64] FLAG: --image-gc-low-threshold="80"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373351 4663 flags.go:64] FLAG: --image-service-endpoint=""
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373355 4663 flags.go:64] FLAG: --kernel-memcg-notification="false"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373359 4663 flags.go:64] FLAG: --kube-api-burst="100"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373363 4663 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373367 4663 flags.go:64] FLAG: --kube-api-qps="50"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373371 4663 flags.go:64] FLAG: --kube-reserved=""
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373375 4663 flags.go:64] FLAG: --kube-reserved-cgroup=""
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373379 4663 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373383 4663 flags.go:64] FLAG: --kubelet-cgroups=""
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373387 4663 flags.go:64] FLAG: --local-storage-capacity-isolation="true"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373391 4663 flags.go:64] FLAG: --lock-file=""
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373394 4663 flags.go:64] FLAG: --log-cadvisor-usage="false"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373399 4663 flags.go:64] FLAG: --log-flush-frequency="5s"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373403 4663 flags.go:64] FLAG: --log-json-info-buffer-size="0"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373408 4663 flags.go:64] FLAG: --log-json-split-stream="false"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373412 4663 flags.go:64] FLAG: --log-text-info-buffer-size="0"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373415 4663 flags.go:64] FLAG: --log-text-split-stream="false"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373419 4663 flags.go:64] FLAG: --logging-format="text"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373423 4663 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373428 4663 flags.go:64] FLAG: --make-iptables-util-chains="true"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373431 4663 flags.go:64] FLAG: --manifest-url=""
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373435 4663 flags.go:64] FLAG: --manifest-url-header=""
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373441 4663 flags.go:64] FLAG: --max-housekeeping-interval="15s"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373445 4663 flags.go:64] FLAG: --max-open-files="1000000"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373451 4663 flags.go:64] FLAG: --max-pods="110"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373455 4663 flags.go:64] FLAG: --maximum-dead-containers="-1"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373459 4663 flags.go:64] FLAG: --maximum-dead-containers-per-container="1"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373463 4663 flags.go:64] FLAG: --memory-manager-policy="None"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373468 4663 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373472 4663 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373476 4663 flags.go:64] FLAG: --node-ip="192.168.126.11"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373480 4663 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373491 4663 flags.go:64] FLAG: --node-status-max-images="50"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373495 4663 flags.go:64] FLAG: --node-status-update-frequency="10s"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373499 4663 flags.go:64] FLAG: --oom-score-adj="-999"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373503 4663 flags.go:64] FLAG: --pod-cidr=""
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373507 4663 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373515 4663 flags.go:64] FLAG: --pod-manifest-path=""
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373519 4663 flags.go:64] FLAG: --pod-max-pids="-1"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373523 4663 flags.go:64] FLAG: --pods-per-core="0"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373527 4663 flags.go:64] FLAG: --port="10250"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373531 4663 flags.go:64] FLAG: --protect-kernel-defaults="false"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373535 4663 flags.go:64] FLAG: --provider-id=""
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373539 4663 flags.go:64] FLAG: --qos-reserved=""
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373543 4663 flags.go:64] FLAG: --read-only-port="10255"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373547 4663 flags.go:64] FLAG: --register-node="true"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373551 4663 flags.go:64] FLAG: --register-schedulable="true"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373555 4663 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373562 4663 flags.go:64] FLAG: --registry-burst="10"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373566 4663 flags.go:64] FLAG: --registry-qps="5"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373569 4663 flags.go:64] FLAG: --reserved-cpus=""
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373573 4663 flags.go:64] FLAG: --reserved-memory=""
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373579 4663 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373583 4663 flags.go:64] FLAG: --root-dir="/var/lib/kubelet"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373587 4663 flags.go:64] FLAG: --rotate-certificates="false"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373591 4663 flags.go:64] FLAG: --rotate-server-certificates="false"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373595 4663 flags.go:64] FLAG: --runonce="false"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373599 4663 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373603 4663 flags.go:64] FLAG: --runtime-request-timeout="2m0s"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373607 4663 flags.go:64] FLAG: --seccomp-default="false"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373617 4663 flags.go:64] FLAG: --serialize-image-pulls="true"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373621 4663 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373625 4663 flags.go:64] FLAG: --storage-driver-db="cadvisor"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373630 4663 flags.go:64] FLAG: --storage-driver-host="localhost:8086"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373634 4663 flags.go:64] FLAG: --storage-driver-password="root"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373638 4663 flags.go:64] FLAG: --storage-driver-secure="false"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373642 4663 flags.go:64] FLAG: --storage-driver-table="stats"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373645 4663 flags.go:64] FLAG: --storage-driver-user="root"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373649 4663 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373653 4663 flags.go:64] FLAG: --sync-frequency="1m0s"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373657 4663 flags.go:64] FLAG: --system-cgroups=""
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373661 4663 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373667 4663 flags.go:64] FLAG: --system-reserved-cgroup=""
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373671 4663 flags.go:64] FLAG: --tls-cert-file=""
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373675 4663 flags.go:64] FLAG: --tls-cipher-suites="[]"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373680 4663 flags.go:64] FLAG: --tls-min-version=""
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373683 4663 flags.go:64] FLAG: --tls-private-key-file=""
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373687 4663 flags.go:64] FLAG: --topology-manager-policy="none"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373691 4663 flags.go:64] FLAG: --topology-manager-policy-options=""
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373695 4663 flags.go:64] FLAG: --topology-manager-scope="container"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373699 4663 flags.go:64] FLAG: --v="2"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373705 4663 flags.go:64] FLAG: --version="false"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373710 4663 flags.go:64] FLAG: --vmodule=""
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373715 4663 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.373719 4663 flags.go:64] FLAG: --volume-stats-agg-period="1m0s"
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.373831 4663 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.373836 4663 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.373841 4663 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.373845 4663 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.374106 4663 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.381362 4663 server.go:491] "Kubelet version" kubeletVersion="v1.31.5"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.381392 4663 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.382867 4663 server.go:940] "Client rotation is on, will bootstrap in background"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.385674 4663 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.385752 4663 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem".
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.386839 4663 server.go:997] "Starting client certificate rotation"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.386866 4663 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.387072 4663 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-12-07 20:14:47.215316136 +0000 UTC
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.387167 4663 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.396859 4663 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.400254 4663 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 11 10:17:49 crc kubenswrapper[4663]: E1211 10:17:49.400954 4663 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 192.168.25.89:6443: connect: connection refused" logger="UnhandledError"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.411588 4663 log.go:25] "Validated CRI v1 runtime API"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.429708 4663 log.go:25] "Validated CRI v1 image API"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.431729 4663 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.434593 4663 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-12-11-10-13-57-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.434635 4663 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:49 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/containers/storage/overlay-containers/75d81934760b26101869fbd8e4b5954c62b019c1cc3e5a0c9f82ed8de46b3b22/userdata/shm:{mountpoint:/var/lib/containers/storage/overlay-containers/75d81934760b26101869fbd8e4b5954c62b019c1cc3e5a0c9f82ed8de46b3b22/userdata/shm major:0 minor:42 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:50 fsType:tmpfs blockSize:0} overlay_0-43:{mountpoint:/var/lib/containers/storage/overlay/94b752e0a51c0134b00ddef6dc7a933a9d7c1d9bdc88a18dae4192a0d557d623/merged major:0 minor:43 fsType:overlay blockSize:0}]
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.447463 4663 manager.go:217] Machine: {Timestamp:2025-12-11 10:17:49.446360806 +0000 UTC m=+0.264290610 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2445404 MemoryCapacity:33654120448 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:18e040fd-bd85-4c98-8f53-60800a512169 BootID:a09a2675-5943-46f1-b4c2-67ff05e820a6 Filesystems:[{Device:/var/lib/containers/storage/overlay-containers/75d81934760b26101869fbd8e4b5954c62b019c1cc3e5a0c9f82ed8de46b3b22/userdata/shm DeviceMajor:0 DeviceMinor:42 Capacity:65536000 Type:vfs Inodes:4108169 HasInodes:true} {Device:overlay_0-43 DeviceMajor:0 DeviceMinor:43 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:49 Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:50 Capacity:1073741824 Type:vfs Inodes:4108169 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827060224 Type:vfs Inodes:4108169 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827060224 Type:vfs Inodes:1048576 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:ac:a1:b3 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:enp3s0 MacAddress:fa:16:3e:ac:a1:b3 Speed:-1 Mtu:1500} {Name:enp7s0 MacAddress:fa:16:3e:e2:ae:c2 Speed:-1 Mtu:1440} {Name:enp7s0.20 MacAddress:52:54:00:e7:86:9f Speed:-1 Mtu:1436} {Name:enp7s0.21 MacAddress:52:54:00:42:50:e1 Speed:-1 Mtu:1436} {Name:enp7s0.22 MacAddress:52:54:00:eb:06:a5 Speed:-1 Mtu:1436} {Name:eth10 MacAddress:7a:b8:b9:4c:5c:6e Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:2a:13:33:c7:b0:40 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654120448 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:65536 Type:Data Level:1} {Id:0 Size:65536 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:65536 Type:Data Level:1} {Id:1 Size:65536 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:65536 Type:Data Level:1} {Id:10 Size:65536 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:65536 Type:Data Level:1} {Id:11 Size:65536 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:65536 Type:Data Level:1} {Id:2 Size:65536 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:65536 Type:Data Level:1} {Id:3 Size:65536 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:65536 Type:Data Level:1} {Id:4 Size:65536 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:65536 Type:Data Level:1} {Id:5 Size:65536 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:65536 Type:Data Level:1} {Id:6 Size:65536 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:65536 Type:Data Level:1} {Id:7 Size:65536 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:65536 Type:Data Level:1} {Id:8 Size:65536 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:65536 Type:Data Level:1} {Id:9 Size:65536 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.447655 4663 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.447768 4663 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.449126 4663 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.449333 4663 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.449372 4663 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.449567 4663 topology_manager.go:138] "Creating topology manager with none policy"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.449579 4663 container_manager_linux.go:303] "Creating device plugin manager"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.449864 4663 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.449892 4663 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.450094 4663 state_mem.go:36] "Initialized new in-memory state store"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.450206 4663 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.451953 4663 kubelet.go:418] "Attempting to sync node with API server"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.451974 4663 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.452000 4663 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.452013 4663 kubelet.go:324] "Adding apiserver pod source"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.452032 4663 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.454581 4663 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 192.168.25.89:6443: connect: connection refused
Dec 11 10:17:49 crc kubenswrapper[4663]: E1211 10:17:49.454759 4663 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 192.168.25.89:6443: connect: connection refused" logger="UnhandledError"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.454908 4663 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.454929 4663 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 192.168.25.89:6443: connect: connection refused
Dec 11 10:17:49 crc kubenswrapper[4663]: E1211 10:17:49.455959 4663 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 192.168.25.89:6443: connect: connection refused" logger="UnhandledError"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.457318 4663 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.459030 4663 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.460385 4663 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.460486 4663 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.460537 4663 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.460581 4663 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.460635 4663 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.460679 4663 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.460721 4663 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.460767 4663 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.460811 4663 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.460854 4663 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.460913 4663 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.460969 4663 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.461407 4663 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.461868 4663 server.go:1280] "Started kubelet"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.462198 4663 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.462248 4663 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.462819 4663 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.463257 4663 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 192.168.25.89:6443: connect: connection refused
Dec 11 10:17:49 crc systemd[1]: Started Kubernetes Kubelet.
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.464469 4663 server.go:460] "Adding debug handlers to kubelet server"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.464594 4663 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.464621 4663 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.464695 4663 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-02 20:18:40.113135519 +0000 UTC
Dec 11 10:17:49 crc kubenswrapper[4663]: E1211 10:17:49.464812 4663 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.464828 4663 volume_manager.go:287] "The desired_state_of_world populator starts"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.464845 4663 volume_manager.go:289] "Starting Kubelet Volume Manager"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.464936 4663 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.466059 4663 factory.go:55] Registering systemd factory
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.466084 4663 factory.go:221] Registration of the systemd container factory successfully
Dec 11 10:17:49 crc kubenswrapper[4663]: E1211 10:17:49.466263 4663 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.89:6443: connect: connection refused" interval="200ms"
Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.466404 4663 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 192.168.25.89:6443: connect: connection refused
Dec 11 10:17:49 crc kubenswrapper[4663]: E1211 10:17:49.466479 4663 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 192.168.25.89:6443: connect: connection refused" logger="UnhandledError"
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.466853 4663 factory.go:153] Registering CRI-O factory
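The certificate_manager.go:356 lines in this block pair each certificate's expiration with a rotation deadline months earlier. Upstream client-go picks that deadline at a jittered 70-90% of the certificate's NotBefore..NotAfter window; the sketch below reproduces that policy under those assumptions (the 0.7/0.3 split is recalled from client-go's certificate manager and the helper name is ours, neither is read from this log).

    package main

    import (
        "fmt"
        "math/rand"
        "time"
    )

    // rotationDeadline picks a point at a jittered 70-90% of the
    // certificate's validity window, so renewal starts well before
    // expiry, as the certificate_manager.go lines above show.
    func rotationDeadline(notBefore, notAfter time.Time) time.Time {
        total := notAfter.Sub(notBefore)
        jittered := time.Duration(float64(total) * (0.7 + 0.3*rand.Float64()))
        return notBefore.Add(jittered)
    }

    func main() {
        // Assumed one-year validity ending at the logged expiration;
        // NotBefore is not in this log.
        notBefore := time.Date(2025, 2, 24, 5, 52, 8, 0, time.UTC)
        notAfter := time.Date(2026, 2, 24, 5, 52, 8, 0, time.UTC)
        fmt.Println("expiration:", notAfter)
        fmt.Println("rotation deadline:", rotationDeadline(notBefore, notAfter))
    }

Under that assumed window, the 70-90% band runs from roughly early November 2025 to mid-January 2026, and the logged deadline of 2025-12-07 20:14:47 falls inside it.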
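The nodeConfig dump above also fixes the hard eviction thresholds: memory.available < 100Mi, nodefs.available < 10%, nodefs.inodesFree < 5%, imagefs.available < 15%, imagefs.inodesFree < 5%. A minimal sketch of how such Signal/Operator/Value triples evaluate against observed capacity follows; the type, helper, and sample observations are illustrative and only the thresholds plus the 33654120448-byte memory and 85292941312-byte /dev/vda4 capacities come from the log.

    package main

    import "fmt"

    // threshold mirrors one entry of HardEvictionThresholds: a signal
    // compared with LessThan against an absolute quantity (bytes) or
    // a percentage of capacity.
    type threshold struct {
        signal     string
        quantity   int64   // absolute bytes; 0 when percentage is used
        percentage float64 // fraction of capacity; 0 when quantity is used
    }

    func (t threshold) breached(available, capacity int64) bool {
        limit := t.quantity
        if t.percentage > 0 {
            limit = int64(t.percentage * float64(capacity))
        }
        return available < limit // Operator: LessThan
    }

    func main() {
        thresholds := []threshold{
            {signal: "memory.available", quantity: 100 << 20}, // 100Mi
            {signal: "nodefs.available", percentage: 0.10},
            {signal: "imagefs.available", percentage: 0.15},
        }
        // Illustrative observations: {available, capacity} in bytes;
        // capacities match the Machine/Filesystems lines above.
        obs := map[string][2]int64{
            "memory.available":  {200 << 20, 33654120448},
            "nodefs.available":  {8 << 30, 85292941312},
            "imagefs.available": {8 << 30, 85292941312},
        }
        for _, t := range thresholds {
            o := obs[t.signal]
            fmt.Printf("%s breached=%v\n", t.signal, t.breached(o[0], o[1]))
        }
    }

With these sample numbers only imagefs.available trips (8 GiB free is under 15% of the 85 GB image filesystem), which is the kind of per-signal decision the eviction manager makes on the thresholds logged above.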
Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.466952 4663 factory.go:221] Registration of the crio container factory successfully Dec 11 10:17:49 crc kubenswrapper[4663]: E1211 10:17:49.466134 4663 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 192.168.25.89:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.188021d8ea99f248 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-11 10:17:49.461844552 +0000 UTC m=+0.279774335,LastTimestamp:2025-12-11 10:17:49.461844552 +0000 UTC m=+0.279774335,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.467031 4663 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.467170 4663 factory.go:103] Registering Raw factory Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.467401 4663 manager.go:1196] Started watching for new ooms in manager Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.468913 4663 manager.go:319] Starting recovery of all containers Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.472227 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.472334 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.472406 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.472481 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.472546 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.472600 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" 
volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.472654 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.472708 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.472773 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.472836 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.472887 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.472960 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.473012 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.473075 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.473142 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.473221 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.473290 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" 
volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.473357 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.473421 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.473483 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.473541 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.473605 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.473665 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.473728 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.473792 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.473848 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.473926 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.473989 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" 
volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.474042 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.474100 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.474162 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.474244 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.474303 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.474365 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.474421 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.474478 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.474530 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.474585 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.474638 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" 
volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.474696 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.474752 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.474808 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.474867 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.474935 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.476831 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.476919 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.477242 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.477304 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.477332 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.477448 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" 
volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.477516 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.477538 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.477574 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.477601 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.477615 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.477634 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.477650 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.477667 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.477678 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.477693 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.477706 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.477718 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.477733 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.477744 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.477758 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.477768 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.477779 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.477796 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.477810 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.477824 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.477839 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.477850 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" 
volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.477868 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.477878 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.477890 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.477917 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.477927 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.477942 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.477953 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.477966 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.477982 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.477996 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478013 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" 
volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478028 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478040 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478052 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478064 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478081 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478119 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478130 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478145 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478159 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478173 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478201 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" 
volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478212 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478228 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478241 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478256 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478268 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478278 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478292 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478303 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478314 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478328 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478417 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" 
volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478460 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478484 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478500 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478522 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478543 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478557 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478573 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478585 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478599 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478611 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478624 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" 
volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478634 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478647 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478674 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478689 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478702 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478713 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478726 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478737 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478747 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478761 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478772 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478785 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478796 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478807 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478820 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478830 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478845 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478855 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478872 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478886 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478913 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478930 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" 
volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478942 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478954 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478968 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478978 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.478994 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.479005 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.479015 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.479028 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.479039 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.479050 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.479065 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" 
volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.479075 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.479089 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.479099 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.479110 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.479124 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.479134 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.479146 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.479158 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.479170 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.479201 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.482233 4663 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" 
volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.482278 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.482294 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.482305 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.482317 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.482329 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.482340 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.482365 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.482376 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.482387 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.482401 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: 
I1211 10:17:49.482411 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.482422 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.482434 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.482445 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.482454 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.482464 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.482475 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.482486 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.482498 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.482510 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.482522 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 
10:17:49.482533 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.482544 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.482554 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.482565 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.482576 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.482586 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.482598 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.482612 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.482628 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.482641 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.482652 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.482663 4663 
reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.482672 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.482682 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.482692 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.482703 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.482715 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.482725 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.482736 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.482751 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.482762 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.482772 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.482781 
4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.482790 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.482805 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.482818 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.482829 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.482840 4663 reconstruct.go:97] "Volume reconstruction finished" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.482847 4663 reconciler.go:26] "Reconciler: start to sync state" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.487141 4663 manager.go:324] Recovery completed Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.497300 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.498765 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.498813 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.498827 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.499988 4663 cpu_manager.go:225] "Starting CPU manager" policy="none" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.500016 4663 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.500036 4663 state_mem.go:36] "Initialized new in-memory state store" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.502098 4663 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.503391 4663 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.503428 4663 status_manager.go:217] "Starting to sync pod status with apiserver" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.503454 4663 kubelet.go:2335] "Starting kubelet main sync loop" Dec 11 10:17:49 crc kubenswrapper[4663]: E1211 10:17:49.503494 4663 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.503971 4663 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 192.168.25.89:6443: connect: connection refused Dec 11 10:17:49 crc kubenswrapper[4663]: E1211 10:17:49.504020 4663 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 192.168.25.89:6443: connect: connection refused" logger="UnhandledError" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.505169 4663 policy_none.go:49] "None policy: Start" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.506053 4663 memory_manager.go:170] "Starting memorymanager" policy="None" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.506084 4663 state_mem.go:35] "Initializing new in-memory state store" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.555467 4663 manager.go:334] "Starting Device Plugin manager" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.555530 4663 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.555544 4663 server.go:79] "Starting device plugin registration server" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.556125 4663 eviction_manager.go:189] "Eviction manager: starting control loop" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.556145 4663 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.556500 4663 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.556635 4663 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.556647 4663 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Dec 11 10:17:49 crc kubenswrapper[4663]: E1211 10:17:49.563051 4663 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.603574 4663 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc"] Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.603734 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 
10:17:49.604958 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.604995 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.605005 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.605156 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.605385 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.605422 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.605946 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.605956 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.605972 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.605983 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.605972 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.606029 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.606133 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.606165 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.606202 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.606833 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.606857 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.606866 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.606955 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.607026 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.607053 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.607063 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.607072 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.607074 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.607501 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.607522 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.607532 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.607540 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.607669 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.607693 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.607959 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.608053 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.608080 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.608636 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.608676 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.608687 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.608924 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.608997 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.609308 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.609332 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.609340 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.609855 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.609875 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.609899 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.656690 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.657495 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.657522 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.657529 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.657546 4663 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 11 10:17:49 crc kubenswrapper[4663]: E1211 10:17:49.657963 4663 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 192.168.25.89:6443: connect: connection refused" node="crc" Dec 11 10:17:49 crc kubenswrapper[4663]: E1211 10:17:49.668394 4663 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.89:6443: connect: connection refused" interval="400ms" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.684594 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.684630 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 11 10:17:49 crc 
kubenswrapper[4663]: I1211 10:17:49.684716 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.684770 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.684792 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.684807 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.684828 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.684846 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.684865 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.684908 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.684935 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.684962 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.684988 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.685034 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.685060 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.785990 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.786055 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.786076 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.786092 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.786108 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.786122 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: 
\"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.786137 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.786153 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.786173 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.786206 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.786296 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.786309 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.786326 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.786373 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.786405 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.786421 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: 
\"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.786416 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.786457 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.786459 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.786476 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.786465 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.786435 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.786521 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.786549 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.786589 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.786597 4663 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.786608 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.786631 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.786727 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.786634 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.858119 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.859491 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.859539 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.859549 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.859575 4663 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 11 10:17:49 crc kubenswrapper[4663]: E1211 10:17:49.860083 4663 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 192.168.25.89:6443: connect: connection refused" node="crc" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.933749 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.939121 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.955328 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.957554 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-f17db01725c9a518f2a83ecdb41dc87ee9d3035beb718be78b10702eb4ef2680 WatchSource:0}: Error finding container f17db01725c9a518f2a83ecdb41dc87ee9d3035beb718be78b10702eb4ef2680: Status 404 returned error can't find the container with id f17db01725c9a518f2a83ecdb41dc87ee9d3035beb718be78b10702eb4ef2680 Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.961841 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-6c40ebf10f6716a96fb70ffc60fb5d5cabcac52b6f935bf1afe2feaf39280592 WatchSource:0}: Error finding container 6c40ebf10f6716a96fb70ffc60fb5d5cabcac52b6f935bf1afe2feaf39280592: Status 404 returned error can't find the container with id 6c40ebf10f6716a96fb70ffc60fb5d5cabcac52b6f935bf1afe2feaf39280592 Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.968065 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 11 10:17:49 crc kubenswrapper[4663]: I1211 10:17:49.972317 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.972639 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-c9637ea93a8fc7aec75f146ccef1f245273a6843cb81b68d92a159a5f7a1486d WatchSource:0}: Error finding container c9637ea93a8fc7aec75f146ccef1f245273a6843cb81b68d92a159a5f7a1486d: Status 404 returned error can't find the container with id c9637ea93a8fc7aec75f146ccef1f245273a6843cb81b68d92a159a5f7a1486d Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.984096 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-11134e14fdb9dda4cb6f835ba6df700ec850bedc655b7da4adbb788359cf87eb WatchSource:0}: Error finding container 11134e14fdb9dda4cb6f835ba6df700ec850bedc655b7da4adbb788359cf87eb: Status 404 returned error can't find the container with id 11134e14fdb9dda4cb6f835ba6df700ec850bedc655b7da4adbb788359cf87eb Dec 11 10:17:49 crc kubenswrapper[4663]: W1211 10:17:49.985342 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-69091ee4e8211e43d94fcb5cf0473a104e4849d6150803eeea9c1372332c40c3 WatchSource:0}: Error finding container 69091ee4e8211e43d94fcb5cf0473a104e4849d6150803eeea9c1372332c40c3: Status 404 returned error can't find the container with id 69091ee4e8211e43d94fcb5cf0473a104e4849d6150803eeea9c1372332c40c3 Dec 11 10:17:50 crc kubenswrapper[4663]: E1211 10:17:50.069835 4663 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.89:6443: connect: connection refused" interval="800ms" Dec 11 10:17:50 crc kubenswrapper[4663]: I1211 10:17:50.260455 4663 
kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 10:17:50 crc kubenswrapper[4663]: I1211 10:17:50.261519 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:17:50 crc kubenswrapper[4663]: I1211 10:17:50.261555 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:17:50 crc kubenswrapper[4663]: I1211 10:17:50.261564 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:17:50 crc kubenswrapper[4663]: I1211 10:17:50.261584 4663 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 11 10:17:50 crc kubenswrapper[4663]: E1211 10:17:50.261982 4663 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 192.168.25.89:6443: connect: connection refused" node="crc" Dec 11 10:17:50 crc kubenswrapper[4663]: W1211 10:17:50.337663 4663 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 192.168.25.89:6443: connect: connection refused Dec 11 10:17:50 crc kubenswrapper[4663]: E1211 10:17:50.337742 4663 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 192.168.25.89:6443: connect: connection refused" logger="UnhandledError" Dec 11 10:17:50 crc kubenswrapper[4663]: I1211 10:17:50.463990 4663 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 192.168.25.89:6443: connect: connection refused Dec 11 10:17:50 crc kubenswrapper[4663]: I1211 10:17:50.465009 4663 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-03 23:42:22.812935302 +0000 UTC Dec 11 10:17:50 crc kubenswrapper[4663]: I1211 10:17:50.465050 4663 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 565h24m32.3478867s for next certificate rotation Dec 11 10:17:50 crc kubenswrapper[4663]: I1211 10:17:50.507760 4663 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="977a2d3f94cdea486681aa43acfca631266808e41aeb1af091aa92400821ed37" exitCode=0 Dec 11 10:17:50 crc kubenswrapper[4663]: I1211 10:17:50.507855 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"977a2d3f94cdea486681aa43acfca631266808e41aeb1af091aa92400821ed37"} Dec 11 10:17:50 crc kubenswrapper[4663]: I1211 10:17:50.507975 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"c9637ea93a8fc7aec75f146ccef1f245273a6843cb81b68d92a159a5f7a1486d"} Dec 11 10:17:50 crc kubenswrapper[4663]: I1211 10:17:50.508066 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 10:17:50 crc 
kubenswrapper[4663]: I1211 10:17:50.509212 4663 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356" exitCode=0 Dec 11 10:17:50 crc kubenswrapper[4663]: I1211 10:17:50.509264 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356"} Dec 11 10:17:50 crc kubenswrapper[4663]: I1211 10:17:50.509341 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"f17db01725c9a518f2a83ecdb41dc87ee9d3035beb718be78b10702eb4ef2680"} Dec 11 10:17:50 crc kubenswrapper[4663]: I1211 10:17:50.509431 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:17:50 crc kubenswrapper[4663]: I1211 10:17:50.509460 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:17:50 crc kubenswrapper[4663]: I1211 10:17:50.509464 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 10:17:50 crc kubenswrapper[4663]: I1211 10:17:50.509470 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:17:50 crc kubenswrapper[4663]: I1211 10:17:50.510161 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:17:50 crc kubenswrapper[4663]: I1211 10:17:50.510200 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:17:50 crc kubenswrapper[4663]: I1211 10:17:50.510209 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:17:50 crc kubenswrapper[4663]: I1211 10:17:50.510372 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"52c5a9fef9792d964d1b4505a0e5857652454e088fbc8293524f644afd044507"} Dec 11 10:17:50 crc kubenswrapper[4663]: I1211 10:17:50.510408 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"6c40ebf10f6716a96fb70ffc60fb5d5cabcac52b6f935bf1afe2feaf39280592"} Dec 11 10:17:50 crc kubenswrapper[4663]: I1211 10:17:50.512102 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 10:17:50 crc kubenswrapper[4663]: I1211 10:17:50.513887 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:17:50 crc kubenswrapper[4663]: I1211 10:17:50.513913 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:17:50 crc kubenswrapper[4663]: I1211 10:17:50.513923 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:17:50 crc kubenswrapper[4663]: I1211 10:17:50.514201 4663 generic.go:334] "Generic (PLEG): container finished" 
podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="20d753f2430dca141daa29acaf4e0d0c3ace253effdce543a0ed0e96cfad72d0" exitCode=0 Dec 11 10:17:50 crc kubenswrapper[4663]: I1211 10:17:50.514279 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"20d753f2430dca141daa29acaf4e0d0c3ace253effdce543a0ed0e96cfad72d0"} Dec 11 10:17:50 crc kubenswrapper[4663]: I1211 10:17:50.514330 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"69091ee4e8211e43d94fcb5cf0473a104e4849d6150803eeea9c1372332c40c3"} Dec 11 10:17:50 crc kubenswrapper[4663]: I1211 10:17:50.514466 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 10:17:50 crc kubenswrapper[4663]: I1211 10:17:50.515247 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:17:50 crc kubenswrapper[4663]: I1211 10:17:50.515267 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:17:50 crc kubenswrapper[4663]: I1211 10:17:50.515276 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:17:50 crc kubenswrapper[4663]: I1211 10:17:50.516021 4663 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="3d781e969fff35a9312c0d9cb5e96a751741064ad0bae9e814ceaf39220f7500" exitCode=0 Dec 11 10:17:50 crc kubenswrapper[4663]: I1211 10:17:50.516092 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"3d781e969fff35a9312c0d9cb5e96a751741064ad0bae9e814ceaf39220f7500"} Dec 11 10:17:50 crc kubenswrapper[4663]: I1211 10:17:50.516193 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"11134e14fdb9dda4cb6f835ba6df700ec850bedc655b7da4adbb788359cf87eb"} Dec 11 10:17:50 crc kubenswrapper[4663]: I1211 10:17:50.516246 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 10:17:50 crc kubenswrapper[4663]: I1211 10:17:50.517118 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:17:50 crc kubenswrapper[4663]: I1211 10:17:50.517146 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:17:50 crc kubenswrapper[4663]: I1211 10:17:50.517156 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:17:50 crc kubenswrapper[4663]: W1211 10:17:50.710951 4663 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 192.168.25.89:6443: connect: connection refused Dec 11 10:17:50 crc kubenswrapper[4663]: E1211 10:17:50.711034 4663 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get 
\"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 192.168.25.89:6443: connect: connection refused" logger="UnhandledError" Dec 11 10:17:50 crc kubenswrapper[4663]: E1211 10:17:50.870817 4663 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.89:6443: connect: connection refused" interval="1.6s" Dec 11 10:17:50 crc kubenswrapper[4663]: W1211 10:17:50.916971 4663 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 192.168.25.89:6443: connect: connection refused Dec 11 10:17:50 crc kubenswrapper[4663]: E1211 10:17:50.917049 4663 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 192.168.25.89:6443: connect: connection refused" logger="UnhandledError" Dec 11 10:17:51 crc kubenswrapper[4663]: W1211 10:17:51.055674 4663 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 192.168.25.89:6443: connect: connection refused Dec 11 10:17:51 crc kubenswrapper[4663]: E1211 10:17:51.055756 4663 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 192.168.25.89:6443: connect: connection refused" logger="UnhandledError" Dec 11 10:17:51 crc kubenswrapper[4663]: I1211 10:17:51.062086 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 10:17:51 crc kubenswrapper[4663]: I1211 10:17:51.064278 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:17:51 crc kubenswrapper[4663]: I1211 10:17:51.064312 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:17:51 crc kubenswrapper[4663]: I1211 10:17:51.064324 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:17:51 crc kubenswrapper[4663]: I1211 10:17:51.064342 4663 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 11 10:17:51 crc kubenswrapper[4663]: E1211 10:17:51.064810 4663 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 192.168.25.89:6443: connect: connection refused" node="crc" Dec 11 10:17:51 crc kubenswrapper[4663]: I1211 10:17:51.520653 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"0068e2981a8c781e9563c1714bf0be7f35b5510f1fd7e52ab4cc83a0c0ec0c0e"} Dec 11 10:17:51 crc kubenswrapper[4663]: I1211 10:17:51.520699 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"2c1a0dcfabc032353060f8cb94fc8c601da6b20a3e2c35b84229bf66c9256537"} Dec 11 10:17:51 crc kubenswrapper[4663]: I1211 10:17:51.520712 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"5d9bf7e6f64790700d3765c1935fa7eb13fa839a96617117bb226a8b86090fe9"} Dec 11 10:17:51 crc kubenswrapper[4663]: I1211 10:17:51.520720 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"83cef2539e49e0251b7cdab06b7a9002344f7ee41150f84b3f648372b4d50b55"} Dec 11 10:17:51 crc kubenswrapper[4663]: I1211 10:17:51.520730 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"ba7f98a60c12531c6766887ffac909316494552cc1acb036c5609e6400ac5b55"} Dec 11 10:17:51 crc kubenswrapper[4663]: I1211 10:17:51.520833 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 10:17:51 crc kubenswrapper[4663]: I1211 10:17:51.521654 4663 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Dec 11 10:17:51 crc kubenswrapper[4663]: I1211 10:17:51.522101 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:17:51 crc kubenswrapper[4663]: I1211 10:17:51.522127 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:17:51 crc kubenswrapper[4663]: I1211 10:17:51.522136 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:17:51 crc kubenswrapper[4663]: I1211 10:17:51.524408 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"b28b0a79fd6ef3045b5417a53b9e665faea306044a08b64362f5f573221dea8a"} Dec 11 10:17:51 crc kubenswrapper[4663]: I1211 10:17:51.524444 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"ed406469f2fa49f8100c4c2c4adf80a173b1e5c8ab8581a4dc8c4d93fd557c7b"} Dec 11 10:17:51 crc kubenswrapper[4663]: I1211 10:17:51.524455 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"cf6437a10b018ecd7f7c1a23eb664377818d86ae958710d078a245878cd7d5a5"} Dec 11 10:17:51 crc kubenswrapper[4663]: I1211 10:17:51.524527 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 10:17:51 crc kubenswrapper[4663]: I1211 10:17:51.525104 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:17:51 crc kubenswrapper[4663]: I1211 10:17:51.525127 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:17:51 crc kubenswrapper[4663]: I1211 
10:17:51.525135 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:17:51 crc kubenswrapper[4663]: I1211 10:17:51.526601 4663 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="8c093c729e098e7dd19bd600ddaefbada59b929a740533c804e855c12c17fec5" exitCode=0 Dec 11 10:17:51 crc kubenswrapper[4663]: I1211 10:17:51.526679 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"8c093c729e098e7dd19bd600ddaefbada59b929a740533c804e855c12c17fec5"} Dec 11 10:17:51 crc kubenswrapper[4663]: I1211 10:17:51.526770 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 10:17:51 crc kubenswrapper[4663]: I1211 10:17:51.527413 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:17:51 crc kubenswrapper[4663]: I1211 10:17:51.527433 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:17:51 crc kubenswrapper[4663]: I1211 10:17:51.527441 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:17:51 crc kubenswrapper[4663]: I1211 10:17:51.528860 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"06faf32fe966c534260438465b3da92def0fd10908c333aa992567764401d34a"} Dec 11 10:17:51 crc kubenswrapper[4663]: I1211 10:17:51.528910 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 10:17:51 crc kubenswrapper[4663]: I1211 10:17:51.529508 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:17:51 crc kubenswrapper[4663]: I1211 10:17:51.529533 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:17:51 crc kubenswrapper[4663]: I1211 10:17:51.529544 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:17:51 crc kubenswrapper[4663]: I1211 10:17:51.531489 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"625fddf0941d4a6bb39e6e05fa5888872b1e22f59fde1ffcbcb9990def6a4194"} Dec 11 10:17:51 crc kubenswrapper[4663]: I1211 10:17:51.531516 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"7d29ef8426056a5691774eaa72cd716c08a424df3a0b426e1d7897d839230622"} Dec 11 10:17:51 crc kubenswrapper[4663]: I1211 10:17:51.531527 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"dbc58fcfdfe62d2170360ef7d64580a71e7d25d21800257757b80a2bccff9a5e"} Dec 11 10:17:51 crc kubenswrapper[4663]: I1211 10:17:51.531582 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 10:17:51 crc kubenswrapper[4663]: I1211 
10:17:51.532085 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:17:51 crc kubenswrapper[4663]: I1211 10:17:51.532111 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:17:51 crc kubenswrapper[4663]: I1211 10:17:51.532120 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:17:51 crc kubenswrapper[4663]: I1211 10:17:51.554419 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 11 10:17:52 crc kubenswrapper[4663]: I1211 10:17:52.537399 4663 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="bcad99333cef3c25a76d1d1973439db4d505cbd3a9b5cb353bd708aaed107d50" exitCode=0 Dec 11 10:17:52 crc kubenswrapper[4663]: I1211 10:17:52.537540 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"bcad99333cef3c25a76d1d1973439db4d505cbd3a9b5cb353bd708aaed107d50"} Dec 11 10:17:52 crc kubenswrapper[4663]: I1211 10:17:52.537636 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 10:17:52 crc kubenswrapper[4663]: I1211 10:17:52.537690 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 10:17:52 crc kubenswrapper[4663]: I1211 10:17:52.537741 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 10:17:52 crc kubenswrapper[4663]: I1211 10:17:52.537565 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 10:17:52 crc kubenswrapper[4663]: I1211 10:17:52.539326 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:17:52 crc kubenswrapper[4663]: I1211 10:17:52.539358 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:17:52 crc kubenswrapper[4663]: I1211 10:17:52.539369 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:17:52 crc kubenswrapper[4663]: I1211 10:17:52.539369 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:17:52 crc kubenswrapper[4663]: I1211 10:17:52.539382 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:17:52 crc kubenswrapper[4663]: I1211 10:17:52.539393 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:17:52 crc kubenswrapper[4663]: I1211 10:17:52.539403 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:17:52 crc kubenswrapper[4663]: I1211 10:17:52.539373 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:17:52 crc kubenswrapper[4663]: I1211 10:17:52.539489 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:17:52 crc kubenswrapper[4663]: I1211 10:17:52.539503 4663 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 11 10:17:52 crc kubenswrapper[4663]: I1211 10:17:52.539404 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:17:52 crc kubenswrapper[4663]: I1211 10:17:52.539550 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:17:52 crc kubenswrapper[4663]: I1211 10:17:52.665552 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 10:17:52 crc kubenswrapper[4663]: I1211 10:17:52.666779 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:17:52 crc kubenswrapper[4663]: I1211 10:17:52.666822 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:17:52 crc kubenswrapper[4663]: I1211 10:17:52.666834 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:17:52 crc kubenswrapper[4663]: I1211 10:17:52.666878 4663 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 11 10:17:53 crc kubenswrapper[4663]: I1211 10:17:53.544452 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"650c88b6e68d08d20031464b545fb81f3e336e9c05b934b412339f564d695467"} Dec 11 10:17:53 crc kubenswrapper[4663]: I1211 10:17:53.544508 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"0f8c9e83c81f2e27c1362021ccd701d77a5530e580cab8b4d8e35d10b11a3c59"} Dec 11 10:17:53 crc kubenswrapper[4663]: I1211 10:17:53.544522 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"a6d1777dec0ddd0ddb956014778b7162342bdeaf8a15e1561ef4d8b0c96ca7ec"} Dec 11 10:17:53 crc kubenswrapper[4663]: I1211 10:17:53.544531 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"9c34a924010a180dc3aedd1bf97afc89cb1fb4a07a612ba51ef71be2f93e320d"} Dec 11 10:17:53 crc kubenswrapper[4663]: I1211 10:17:53.544550 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"6f05ba7b44829dc4d846e880dae1939044fca92c81181b74e7bd2b620d8d23e3"} Dec 11 10:17:53 crc kubenswrapper[4663]: I1211 10:17:53.544674 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 10:17:53 crc kubenswrapper[4663]: I1211 10:17:53.545431 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:17:53 crc kubenswrapper[4663]: I1211 10:17:53.545480 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:17:53 crc kubenswrapper[4663]: I1211 10:17:53.545491 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:17:53 crc kubenswrapper[4663]: I1211 10:17:53.689023 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 10:17:53 crc kubenswrapper[4663]: I1211 10:17:53.689214 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 10:17:53 crc kubenswrapper[4663]: I1211 10:17:53.690396 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:17:53 crc kubenswrapper[4663]: I1211 10:17:53.690435 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:17:53 crc kubenswrapper[4663]: I1211 10:17:53.690445 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:17:54 crc kubenswrapper[4663]: I1211 10:17:54.425424 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 10:17:54 crc kubenswrapper[4663]: I1211 10:17:54.425584 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 10:17:54 crc kubenswrapper[4663]: I1211 10:17:54.426671 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:17:54 crc kubenswrapper[4663]: I1211 10:17:54.426700 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:17:54 crc kubenswrapper[4663]: I1211 10:17:54.426709 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:17:55 crc kubenswrapper[4663]: I1211 10:17:55.513121 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 10:17:55 crc kubenswrapper[4663]: I1211 10:17:55.513321 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 10:17:55 crc kubenswrapper[4663]: I1211 10:17:55.514398 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:17:55 crc kubenswrapper[4663]: I1211 10:17:55.514450 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:17:55 crc kubenswrapper[4663]: I1211 10:17:55.514459 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:17:55 crc kubenswrapper[4663]: I1211 10:17:55.814957 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 10:17:55 crc kubenswrapper[4663]: I1211 10:17:55.815109 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 10:17:55 crc kubenswrapper[4663]: I1211 10:17:55.816029 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:17:55 crc kubenswrapper[4663]: I1211 10:17:55.816068 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:17:55 crc kubenswrapper[4663]: I1211 10:17:55.816077 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:17:55 crc kubenswrapper[4663]: I1211 10:17:55.944538 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" 
Dec 11 10:17:56 crc kubenswrapper[4663]: I1211 10:17:56.441848 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc"
Dec 11 10:17:56 crc kubenswrapper[4663]: I1211 10:17:56.442061 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 11 10:17:56 crc kubenswrapper[4663]: I1211 10:17:56.443508 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:17:56 crc kubenswrapper[4663]: I1211 10:17:56.443556 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:17:56 crc kubenswrapper[4663]: I1211 10:17:56.443564 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:17:56 crc kubenswrapper[4663]: I1211 10:17:56.551175 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 11 10:17:56 crc kubenswrapper[4663]: I1211 10:17:56.552312 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:17:56 crc kubenswrapper[4663]: I1211 10:17:56.552347 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:17:56 crc kubenswrapper[4663]: I1211 10:17:56.552358 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:17:58 crc kubenswrapper[4663]: I1211 10:17:58.513851 4663 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body=
Dec 11 10:17:58 crc kubenswrapper[4663]: I1211 10:17:58.514282 4663 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Dec 11 10:17:59 crc kubenswrapper[4663]: E1211 10:17:59.563144 4663 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found"
Dec 11 10:18:00 crc kubenswrapper[4663]: I1211 10:18:00.155320 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 11 10:18:00 crc kubenswrapper[4663]: I1211 10:18:00.155464 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 11 10:18:00 crc kubenswrapper[4663]: I1211 10:18:00.156356 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:00 crc kubenswrapper[4663]: I1211 10:18:00.156381 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:00 crc kubenswrapper[4663]: I1211 10:18:00.156391 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:00 crc kubenswrapper[4663]: I1211 10:18:00.903506 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 11 10:18:00 crc kubenswrapper[4663]: I1211 10:18:00.903633 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 11 10:18:00 crc kubenswrapper[4663]: I1211 10:18:00.904552 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:00 crc kubenswrapper[4663]: I1211 10:18:00.904602 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:00 crc kubenswrapper[4663]: I1211 10:18:00.904613 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:00 crc kubenswrapper[4663]: I1211 10:18:00.907316 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 11 10:18:01 crc kubenswrapper[4663]: I1211 10:18:01.223110 4663 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Dec 11 10:18:01 crc kubenswrapper[4663]: I1211 10:18:01.223168 4663 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Dec 11 10:18:01 crc kubenswrapper[4663]: I1211 10:18:01.226786 4663 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Dec 11 10:18:01 crc kubenswrapper[4663]: I1211 10:18:01.226817 4663 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Dec 11 10:18:01 crc kubenswrapper[4663]: I1211 10:18:01.561959 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 11 10:18:01 crc kubenswrapper[4663]: I1211 10:18:01.562782 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:01 crc kubenswrapper[4663]: I1211 10:18:01.562824 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:01 crc kubenswrapper[4663]: I1211 10:18:01.562834 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:01 crc kubenswrapper[4663]: I1211 10:18:01.573236 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 11 10:18:02 crc kubenswrapper[4663]: I1211 10:18:02.130573 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc"
Dec 11 10:18:02 crc kubenswrapper[4663]: I1211 10:18:02.130787 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 11 10:18:02 crc kubenswrapper[4663]: I1211 10:18:02.131861 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:02 crc kubenswrapper[4663]: I1211 10:18:02.131905 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:02 crc kubenswrapper[4663]: I1211 10:18:02.131917 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:02 crc kubenswrapper[4663]: I1211 10:18:02.160717 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc"
Dec 11 10:18:02 crc kubenswrapper[4663]: I1211 10:18:02.565776 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 11 10:18:02 crc kubenswrapper[4663]: I1211 10:18:02.565779 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 11 10:18:02 crc kubenswrapper[4663]: I1211 10:18:02.570406 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:02 crc kubenswrapper[4663]: I1211 10:18:02.570447 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:02 crc kubenswrapper[4663]: I1211 10:18:02.570458 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:02 crc kubenswrapper[4663]: I1211 10:18:02.570520 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:02 crc kubenswrapper[4663]: I1211 10:18:02.570555 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:02 crc kubenswrapper[4663]: I1211 10:18:02.570576 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:02 crc kubenswrapper[4663]: I1211 10:18:02.580429 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc"
Dec 11 10:18:03 crc kubenswrapper[4663]: I1211 10:18:03.567866 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 11 10:18:03 crc kubenswrapper[4663]: I1211 10:18:03.568967 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:03 crc kubenswrapper[4663]: I1211 10:18:03.569002 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:03 crc kubenswrapper[4663]: I1211 10:18:03.569012 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:03 crc kubenswrapper[4663]: I1211 10:18:03.751751 4663 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body=
Dec 11 10:18:03 crc kubenswrapper[4663]: I1211 10:18:03.751809 4663 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused"
Dec 11 10:18:04 crc kubenswrapper[4663]: I1211 10:18:04.426655 4663 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body=
Dec 11 10:18:04 crc kubenswrapper[4663]: I1211 10:18:04.426742 4663 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused"
Dec 11 10:18:05 crc kubenswrapper[4663]: I1211 10:18:05.947583 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 11 10:18:05 crc kubenswrapper[4663]: I1211 10:18:05.947744 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 11 10:18:05 crc kubenswrapper[4663]: I1211 10:18:05.948084 4663 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body=
Dec 11 10:18:05 crc kubenswrapper[4663]: I1211 10:18:05.948118 4663 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused"
Dec 11 10:18:05 crc kubenswrapper[4663]: I1211 10:18:05.948604 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:05 crc kubenswrapper[4663]: I1211 10:18:05.948639 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:05 crc kubenswrapper[4663]: I1211 10:18:05.948649 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:05 crc kubenswrapper[4663]: I1211 10:18:05.950255 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 11 10:18:06 crc kubenswrapper[4663]: E1211 10:18:06.224003 4663 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="3.2s"
Dec 11 10:18:06 crc kubenswrapper[4663]: E1211 10:18:06.225386 4663 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc"
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.226579 4663 trace.go:236] Trace[1259779932]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (11-Dec-2025 10:17:53.702) (total time: 12523ms):
Dec 11 10:18:06 crc kubenswrapper[4663]: Trace[1259779932]: ---"Objects listed" error: 12523ms (10:18:06.226)
Dec 11 10:18:06 crc kubenswrapper[4663]: Trace[1259779932]: [12.523890258s] [12.523890258s] END
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.226676 4663 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.226635 4663 trace.go:236] Trace[331218751]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (11-Dec-2025 10:17:52.801) (total time: 13424ms):
Dec 11 10:18:06 crc kubenswrapper[4663]: Trace[331218751]: ---"Objects listed" error: 13424ms (10:18:06.226)
Dec 11 10:18:06 crc kubenswrapper[4663]: Trace[331218751]: [13.42473618s] [13.42473618s] END
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.226778 4663 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.227611 4663 trace.go:236] Trace[254631636]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (11-Dec-2025 10:17:54.173) (total time: 12054ms):
Dec 11 10:18:06 crc kubenswrapper[4663]: Trace[254631636]: ---"Objects listed" error: 12053ms (10:18:06.227)
Dec 11 10:18:06 crc kubenswrapper[4663]: Trace[254631636]: [12.054142072s] [12.054142072s] END
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.227660 4663 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.229047 4663 trace.go:236] Trace[440955836]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (11-Dec-2025 10:17:53.941) (total time: 12287ms):
Dec 11 10:18:06 crc kubenswrapper[4663]: Trace[440955836]: ---"Objects listed" error: 12287ms (10:18:06.228)
Dec 11 10:18:06 crc kubenswrapper[4663]: Trace[440955836]: [12.287784276s] [12.287784276s] END
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.229062 4663 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.231901 4663 reconstruct.go:205] "DevicePaths of reconstructed volumes updated"
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.233737 4663 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.249567 4663 csr.go:261] certificate signing request csr-5j6jw is approved, waiting to be issued
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.254489 4663 csr.go:257] certificate signing request csr-5j6jw is issued
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.257067 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.260366 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.463039 4663 apiserver.go:52] "Watching apiserver"
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.466050 4663 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.466316 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf"]
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.466609 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.466723 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb"
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.466753 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 11 10:18:06 crc kubenswrapper[4663]: E1211 10:18:06.466833 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.466725 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.466928 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h"
Dec 11 10:18:06 crc kubenswrapper[4663]: E1211 10:18:06.466967 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.467254 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 11 10:18:06 crc kubenswrapper[4663]: E1211 10:18:06.467285 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.468373 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides"
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.469165 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt"
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.469317 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm"
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.469412 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt"
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.469421 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls"
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.469423 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt"
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.469565 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script"
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.469622 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert"
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.469670 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt"
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.490866 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.503090 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.511130 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.518851 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.525931 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.533085 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"82107b1a-b827-4c4a-b133-ce7e8ed73963\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf6437a10b018ecd7f7c1a23eb664377818d86ae958710d078a245878cd7d5a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52c5a9fef9792d964d1b4505a0e5857652454e088fbc8293524f644afd044507\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPat
h\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed406469f2fa49f8100c4c2c4adf80a173b1e5c8ab8581a4dc8c4d93fd557c7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b28b0a79fd6ef3045b5417a53b9e665faea306044a08b64362f5f573221dea8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.541838 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.548560 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.554821 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.561400 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.565938 4663 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.569687 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.577981 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 10:18:06 crc kubenswrapper[4663]: E1211 10:18:06.579380 4663 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-controller-manager-crc\" already exists" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.584022 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.584645 4663 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:33468->192.168.126.11:17697: read: connection reset by peer" start-of-body= Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.584706 4663 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:33468->192.168.126.11:17697: read: connection reset by peer" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.634058 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.634103 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.634126 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.634155 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod 
\"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.634175 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.634207 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.634225 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.634241 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.634259 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.634275 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.634289 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.634306 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.634321 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.634343 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" 
(UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.634357 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.634375 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.634393 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.634417 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.634440 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.634434 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.634456 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.634518 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.634530 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.634544 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.634617 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.634639 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.634660 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.634680 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.634695 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.634706 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.634710 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.634747 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.634768 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.634797 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.634817 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.634834 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.634850 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.634870 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.634924 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.634943 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") 
pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.634964 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.634982 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635003 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635021 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635047 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635080 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635100 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635117 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635133 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635148 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635165 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635198 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635219 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635236 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635253 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635270 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635304 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635323 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635338 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635354 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635379 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635395 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635430 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635445 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635461 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635481 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635513 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635555 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635572 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635588 4663 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635603 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635630 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635645 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635676 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635693 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635709 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635730 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635750 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635787 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635806 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635823 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635839 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635859 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635877 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635896 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635914 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635934 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635953 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635971 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635990 4663 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.636005 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.636022 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.636039 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.636060 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.636076 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.636091 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.636106 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.636123 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.636140 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.636156 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.636215 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.636237 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.636257 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.636274 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.634720 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.634728 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.634891 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.634920 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635087 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635086 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635234 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635248 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635325 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635469 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635668 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635752 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635800 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635868 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635934 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.635950 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.636003 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.636042 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.636101 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.636445 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.636484 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.636116 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.636152 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.636172 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.636227 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.636249 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.636286 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). 
InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.636437 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.636554 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.636620 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.636655 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.636706 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.636727 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.636728 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.636787 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.636825 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.636908 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.636935 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.636953 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.637048 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.637079 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.637119 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.637173 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.637407 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.637442 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.637493 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.637587 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.637667 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.637696 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.637789 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). 
InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.637796 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.637886 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.638001 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.638097 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.638098 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.638128 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.638136 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.636292 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.638219 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.638240 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.638256 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.638270 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.638274 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.638320 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.638348 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.638370 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.638388 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.638406 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.638432 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.638451 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.638468 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.638484 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.638487 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.638552 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.638575 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.638594 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.638611 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.638628 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.638631 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.638649 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.638684 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.638707 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.638721 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.638736 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.638818 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.638837 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.638853 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.638870 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.638887 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: 
\"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.638905 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.638921 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.638937 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.638951 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.638966 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.638982 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.639000 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.639018 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.639032 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.639048 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.639063 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.639077 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.639092 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.639107 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.639121 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.639136 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.639153 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.639169 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.639271 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.640015 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.640593 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.641420 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.641447 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.641476 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.641495 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.641522 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.641537 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.641567 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.641585 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.641601 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.641616 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.641635 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.641650 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.644727 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.644758 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.644776 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.644815 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.644831 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.644849 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.644863 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.644904 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.644925 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.644940 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.644975 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.641097 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.641273 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.641374 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.641579 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.641757 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.642161 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.642443 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.642835 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.642995 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.643058 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.643631 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.643826 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.644017 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.644033 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.644332 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.643070 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.644743 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.644887 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.645137 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.645170 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.645317 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: E1211 10:18:06.645393 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 10:18:07.145372628 +0000 UTC m=+17.963302402 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.645620 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.645671 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.647326 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.647512 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.647721 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.647783 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.647801 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.648026 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.648262 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.648380 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.649840 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.649875 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.649895 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.649913 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.649932 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.649950 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.649965 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.649982 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.649998 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.650015 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.650031 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.650050 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.650047 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.650067 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.650085 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.650102 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.650121 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.650141 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.650158 4663 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.650174 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.650209 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.650241 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.650257 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.650272 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.650287 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.650471 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.650694 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.651150 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.651348 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.651714 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.652028 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.652163 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.652243 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.652263 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.652449 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). 
InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.652608 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.652781 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.652902 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.653198 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.653512 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.653524 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.653719 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.653792 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.653813 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.653855 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.653892 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.653923 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.653891 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.653972 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: E1211 10:18:06.653975 4663 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.654004 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.654067 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: E1211 10:18:06.654115 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 10:18:07.154098593 +0000 UTC m=+17.972028378 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.654299 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.654339 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.655870 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.655931 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " 
pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.654749 4663 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Dec 11 10:18:06 crc kubenswrapper[4663]: E1211 10:18:06.658175 4663 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.658735 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.658783 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.659142 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: E1211 10:18:06.660460 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 10:18:07.16043667 +0000 UTC m=+17.978366453 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.660980 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.661164 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). 
InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.662148 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.663007 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.663488 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.666925 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.669279 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.669751 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670019 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.657542 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670139 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670199 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670232 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670252 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670275 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670374 4663 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670394 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670405 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670416 4663 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670425 4663 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670436 4663 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670445 4663 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670455 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670463 4663 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670472 4663 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670483 4663 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670492 4663 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670511 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670520 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670528 4663 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670536 4663 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670545 4663 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670554 4663 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670563 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670571 4663 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670581 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670589 4663 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670597 4663 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670605 4663 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670614 4663 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670625 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670635 4663 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670643 4663 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670654 4663 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670662 4663 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: 
\"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670671 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670680 4663 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670688 4663 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670696 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670704 4663 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670712 4663 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670720 4663 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670728 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670736 4663 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670744 4663 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670752 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670761 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670769 4663 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: 
\"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670776 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670785 4663 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670793 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670802 4663 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670809 4663 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670817 4663 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670825 4663 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670833 4663 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670840 4663 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670849 4663 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670857 4663 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670866 4663 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670875 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: 
\"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670883 4663 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670891 4663 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670898 4663 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670906 4663 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670916 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670925 4663 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670938 4663 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670961 4663 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670970 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670978 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670987 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.670997 4663 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.671005 4663 reconciler_common.go:293] "Volume detached for volume 
\"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.671014 4663 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.671023 4663 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.671031 4663 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.671040 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.671049 4663 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.671062 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.671077 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.671086 4663 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.671095 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.673193 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.673285 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.674170 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.674265 4663 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.674286 4663 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.674302 4663 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.674315 4663 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.674327 4663 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.674335 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.674344 4663 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.674343 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.674352 4663 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.674413 4663 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.674429 4663 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.674444 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.674455 4663 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.674466 4663 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.674477 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.674487 4663 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.674510 4663 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.674522 4663 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.674531 4663 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.674541 4663 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.674541 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.674551 4663 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.674568 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.674601 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.674615 4663 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.674625 4663 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.674634 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.674643 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.674652 4663 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.674661 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.674671 4663 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.674679 4663 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.674701 4663 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: 
I1211 10:18:06.674720 4663 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.674719 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.674730 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.674774 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.674778 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.674790 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.674802 4663 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.674813 4663 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.674823 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.674833 4663 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.674842 4663 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.674852 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Dec 11 
10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.674861 4663 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.674980 4663 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.674997 4663 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.675009 4663 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.675020 4663 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.675029 4663 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.675038 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.675048 4663 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.675058 4663 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.675066 4663 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.675077 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.675088 4663 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.675068 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" 
(OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.676117 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.676265 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.676400 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.677060 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.677457 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: E1211 10:18:06.677610 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 10:18:06 crc kubenswrapper[4663]: E1211 10:18:06.677650 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 10:18:06 crc kubenswrapper[4663]: E1211 10:18:06.677674 4663 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 10:18:06 crc kubenswrapper[4663]: E1211 10:18:06.677767 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-11 10:18:07.177739016 +0000 UTC m=+17.995668801 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.678235 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.678571 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.678834 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.679862 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.680449 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.690017 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.690118 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.690260 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.690580 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.690810 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.690978 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.691164 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.691566 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.691739 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.691989 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.692238 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.692249 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.692450 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.692489 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.692868 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). 
InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.693219 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.694002 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: E1211 10:18:06.695119 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 10:18:06 crc kubenswrapper[4663]: E1211 10:18:06.695145 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 10:18:06 crc kubenswrapper[4663]: E1211 10:18:06.695159 4663 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 10:18:06 crc kubenswrapper[4663]: E1211 10:18:06.695230 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-11 10:18:07.195208839 +0000 UTC m=+18.013138623 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.695300 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.696273 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.697731 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.697820 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.698123 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.698826 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.701363 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.701389 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.701947 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.704337 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.708541 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.709492 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-8f5qw"] Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.709841 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-8f5qw" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.709893 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.711118 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.711260 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.711284 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.715650 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.718875 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.719863 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.722823 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.725306 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.726005 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.726092 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.726391 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.727279 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.727346 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.727490 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.727457 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.727868 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.727876 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.732532 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.732570 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.732593 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.732817 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.733039 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"82107b1a-b827-4c4a-b133-ce7e8ed73963\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf6437a10b018ecd7f7c1a23eb664377818d86ae958710d078a245878cd7d5a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52c5a9fef9792d964d1b4505a0e5857652454e088fbc8293524f644afd044507\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed406469f2fa49f8100c4c2c4adf80a173b1e5c8ab8581a4dc8c4d93fd557c7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\
\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b28b0a79fd6ef3045b5417a53b9e665faea306044a08b64362f5f573221dea8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.733314 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.733574 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.736157 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.744249 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.746609 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b934597e-ac43-4d21-82b1-c8d4260800e6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba7f98a60c12531c6766887ffac909316494552cc1acb036c5609e6400ac5b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d9bf7e6f64790700d3765c1935fa7eb13fa839a96617117bb226a8b86090fe9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83cef2539e49e0251b7cdab06b7a9002344f7ee41150f84b3f648372b4d50b55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\
":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0068e2981a8c781e9563c1714bf0be7f35b5510f1fd7e52ab4cc83a0c0ec0c0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1a0dcfabc032353060f8cb94fc8c601da6b20a3e2c35b84229bf66c9256537\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.759001 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.769767 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.775416 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.775466 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/a7cf8637-c767-47eb-8d38-578340067566-hosts-file\") pod \"node-resolver-8f5qw\" (UID: \"a7cf8637-c767-47eb-8d38-578340067566\") " pod="openshift-dns/node-resolver-8f5qw" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.775486 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.775513 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-646rx\" (UniqueName: \"kubernetes.io/projected/a7cf8637-c767-47eb-8d38-578340067566-kube-api-access-646rx\") pod \"node-resolver-8f5qw\" (UID: \"a7cf8637-c767-47eb-8d38-578340067566\") " pod="openshift-dns/node-resolver-8f5qw" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.775545 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.775556 4663 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 
10:18:06.775566 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.775577 4663 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.775572 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h"
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.775589 4663 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.775647 4663 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.775660 4663 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.775672 4663 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.775681 4663 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.775694 4663 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.775703 4663 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.775712 4663 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.775721 4663 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.775726 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.775730 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.775757 4663 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.775767 4663 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.775776 4663 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.775787 4663 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.775797 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.775806 4663 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.775814 4663 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.775823 4663 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.775832 4663 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.775841 4663 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.775849 4663 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.775859 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.775868 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.775876 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.775884 4663 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.775893 4663 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.775901 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.775911 4663 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.775919 4663 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.775927 4663 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.775934 4663 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.775944 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.775951 4663 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.775959 4663 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.775968 4663 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.775976 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.775984 4663 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.775993 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.776000 4663 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.776009 4663 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.776017 4663 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.776025 4663 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.776032 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.776042 4663 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.776050 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.776061 4663 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.776069 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.776078 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.776087 4663 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.776095 4663 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.776103 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.776111 4663 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.776119 4663 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.776127 4663 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.776134 4663 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.776141 4663 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.776149 4663 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.776157 4663 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.776166 4663 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.776174 4663 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.780839 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.783004 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.786308 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.790515 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.800365 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.819135 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.834478 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.845552 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8f5qw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7cf8637-c767-47eb-8d38-578340067566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-646rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8f5qw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.878668 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/a7cf8637-c767-47eb-8d38-578340067566-hosts-file\") pod \"node-resolver-8f5qw\" (UID: \"a7cf8637-c767-47eb-8d38-578340067566\") " pod="openshift-dns/node-resolver-8f5qw" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.878767 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/a7cf8637-c767-47eb-8d38-578340067566-hosts-file\") pod \"node-resolver-8f5qw\" (UID: \"a7cf8637-c767-47eb-8d38-578340067566\") " pod="openshift-dns/node-resolver-8f5qw" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.878787 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-646rx\" (UniqueName: \"kubernetes.io/projected/a7cf8637-c767-47eb-8d38-578340067566-kube-api-access-646rx\") pod \"node-resolver-8f5qw\" (UID: \"a7cf8637-c767-47eb-8d38-578340067566\") " pod="openshift-dns/node-resolver-8f5qw" Dec 11 10:18:06 crc kubenswrapper[4663]: I1211 10:18:06.900408 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-646rx\" (UniqueName: \"kubernetes.io/projected/a7cf8637-c767-47eb-8d38-578340067566-kube-api-access-646rx\") pod \"node-resolver-8f5qw\" (UID: \"a7cf8637-c767-47eb-8d38-578340067566\") " pod="openshift-dns/node-resolver-8f5qw" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.051399 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-8f5qw" Dec 11 10:18:07 crc kubenswrapper[4663]: W1211 10:18:07.060659 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda7cf8637_c767_47eb_8d38_578340067566.slice/crio-b269aee8fce68a872a12cbe6c9f937f574f24ba07073b8564723fe1e6a66c4b8 WatchSource:0}: Error finding container b269aee8fce68a872a12cbe6c9f937f574f24ba07073b8564723fe1e6a66c4b8: Status 404 returned error can't find the container with id b269aee8fce68a872a12cbe6c9f937f574f24ba07073b8564723fe1e6a66c4b8 Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.062513 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-mq524"] Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.063037 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-mq524" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.064674 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.066875 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.068400 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.068912 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.070522 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.075277 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.080863 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/98941020-2764-490d-81db-9decd21aaa13-rootfs\") pod \"machine-config-daemon-mq524\" (UID: \"98941020-2764-490d-81db-9decd21aaa13\") " pod="openshift-machine-config-operator/machine-config-daemon-mq524" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.080916 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/98941020-2764-490d-81db-9decd21aaa13-proxy-tls\") pod \"machine-config-daemon-mq524\" (UID: \"98941020-2764-490d-81db-9decd21aaa13\") " pod="openshift-machine-config-operator/machine-config-daemon-mq524" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.080938 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/98941020-2764-490d-81db-9decd21aaa13-mcd-auth-proxy-config\") pod \"machine-config-daemon-mq524\" (UID: \"98941020-2764-490d-81db-9decd21aaa13\") " pod="openshift-machine-config-operator/machine-config-daemon-mq524" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.080957 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-46t4z\" (UniqueName: \"kubernetes.io/projected/98941020-2764-490d-81db-9decd21aaa13-kube-api-access-46t4z\") pod \"machine-config-daemon-mq524\" (UID: \"98941020-2764-490d-81db-9decd21aaa13\") " pod="openshift-machine-config-operator/machine-config-daemon-mq524" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.083408 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8f5qw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7cf8637-c767-47eb-8d38-578340067566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-646rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8f5qw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.091616 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98941020-2764-490d-81db-9decd21aaa13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mq524\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.099853 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.110467 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.118614 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.127780 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.133897 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.141810 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"82107b1a-b827-4c4a-b133-ce7e8ed73963\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf6437a10b018ecd7f7c1a23eb664377818d86ae958710d078a245878cd7d5a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52c5a9fef9792d964d1b4505a0e5857652454e088fbc8293524f644afd044507\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed406469f2fa49f8100c4c2c4adf80a173b1e5c8ab8581a4dc8c4d93fd557c7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b28b0a79fd6ef3045b5417a53b9e665faea306044a08b64362f5f573221dea8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.150894 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b934597e-ac43-4d21-82b1-c8d4260800e6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba7f98a60c12531c6766887ffac909316494552cc1acb036c5609e6400ac5b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d9bf7e6f64790700d3765c1935fa7eb13fa839a96617117bb226a8b86090fe9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83cef2539e49e0251b7cdab06b7a9002344f7ee41150f84b3f648372b4d50b55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0068e2981a8c781e9563c1714bf0be7f35b5510f1fd7e52ab4cc83a0c0ec0c0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1a0dcfabc032353060f8cb94fc8c601da6b20a3e2c35b84229bf66c9256537\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.181312 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.181376 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.181400 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/98941020-2764-490d-81db-9decd21aaa13-proxy-tls\") pod \"machine-config-daemon-mq524\" (UID: \"98941020-2764-490d-81db-9decd21aaa13\") " pod="openshift-machine-config-operator/machine-config-daemon-mq524"
Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.181421 4663 reconciler_common.go:218]
"operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/98941020-2764-490d-81db-9decd21aaa13-mcd-auth-proxy-config\") pod \"machine-config-daemon-mq524\" (UID: \"98941020-2764-490d-81db-9decd21aaa13\") " pod="openshift-machine-config-operator/machine-config-daemon-mq524" Dec 11 10:18:07 crc kubenswrapper[4663]: E1211 10:18:07.181445 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 10:18:08.181421379 +0000 UTC m=+18.999351163 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.181498 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-46t4z\" (UniqueName: \"kubernetes.io/projected/98941020-2764-490d-81db-9decd21aaa13-kube-api-access-46t4z\") pod \"machine-config-daemon-mq524\" (UID: \"98941020-2764-490d-81db-9decd21aaa13\") " pod="openshift-machine-config-operator/machine-config-daemon-mq524" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.181552 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/98941020-2764-490d-81db-9decd21aaa13-rootfs\") pod \"machine-config-daemon-mq524\" (UID: \"98941020-2764-490d-81db-9decd21aaa13\") " pod="openshift-machine-config-operator/machine-config-daemon-mq524" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.181588 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.181607 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:18:07 crc kubenswrapper[4663]: E1211 10:18:07.181765 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 10:18:07 crc kubenswrapper[4663]: E1211 10:18:07.181795 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 10:18:07 crc kubenswrapper[4663]: E1211 10:18:07.181808 4663 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod 
openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 10:18:07 crc kubenswrapper[4663]: E1211 10:18:07.181842 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-11 10:18:08.181833685 +0000 UTC m=+18.999763469 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.181877 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/98941020-2764-490d-81db-9decd21aaa13-rootfs\") pod \"machine-config-daemon-mq524\" (UID: \"98941020-2764-490d-81db-9decd21aaa13\") " pod="openshift-machine-config-operator/machine-config-daemon-mq524" Dec 11 10:18:07 crc kubenswrapper[4663]: E1211 10:18:07.181917 4663 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 10:18:07 crc kubenswrapper[4663]: E1211 10:18:07.181939 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 10:18:08.181931715 +0000 UTC m=+18.999861499 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 10:18:07 crc kubenswrapper[4663]: E1211 10:18:07.182005 4663 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.182032 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/98941020-2764-490d-81db-9decd21aaa13-mcd-auth-proxy-config\") pod \"machine-config-daemon-mq524\" (UID: \"98941020-2764-490d-81db-9decd21aaa13\") " pod="openshift-machine-config-operator/machine-config-daemon-mq524" Dec 11 10:18:07 crc kubenswrapper[4663]: E1211 10:18:07.182079 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 10:18:08.182061574 +0000 UTC m=+18.999991357 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.184387 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/98941020-2764-490d-81db-9decd21aaa13-proxy-tls\") pod \"machine-config-daemon-mq524\" (UID: \"98941020-2764-490d-81db-9decd21aaa13\") " pod="openshift-machine-config-operator/machine-config-daemon-mq524" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.197776 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-46t4z\" (UniqueName: \"kubernetes.io/projected/98941020-2764-490d-81db-9decd21aaa13-kube-api-access-46t4z\") pod \"machine-config-daemon-mq524\" (UID: \"98941020-2764-490d-81db-9decd21aaa13\") " pod="openshift-machine-config-operator/machine-config-daemon-mq524" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.256111 4663 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-12-11 10:13:06 +0000 UTC, rotation deadline is 2026-11-02 22:18:14.426429583 +0000 UTC Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.256163 4663 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 7836h0m7.170269766s for next certificate rotation Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.282687 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:18:07 crc kubenswrapper[4663]: E1211 10:18:07.282845 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 10:18:07 crc kubenswrapper[4663]: E1211 10:18:07.282874 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 10:18:07 crc kubenswrapper[4663]: E1211 10:18:07.282885 4663 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 10:18:07 crc kubenswrapper[4663]: E1211 10:18:07.282942 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-11 10:18:08.282926363 +0000 UTC m=+19.100856147 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.374644 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-mq524" Dec 11 10:18:07 crc kubenswrapper[4663]: W1211 10:18:07.397162 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod98941020_2764_490d_81db_9decd21aaa13.slice/crio-0211d44501b196ff69aeafce132a978cb903ee2ff009d54c4a954ae6f5f5c1b5 WatchSource:0}: Error finding container 0211d44501b196ff69aeafce132a978cb903ee2ff009d54c4a954ae6f5f5c1b5: Status 404 returned error can't find the container with id 0211d44501b196ff69aeafce132a978cb903ee2ff009d54c4a954ae6f5f5c1b5 Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.417163 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-qv7jl"] Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.417542 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-qv7jl" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.418653 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.419116 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.418995 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.419284 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.419480 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.421046 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-z7882"] Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.421775 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.423438 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.423807 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-4g9zj"] Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.424339 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-4g9zj" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.424775 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.424835 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.425231 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.425254 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.425364 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.425575 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.425864 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.426078 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.436994 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b934597e-ac43-4d21-82b1-c8d4260800e6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba7f98a60c12531c6766887ffac909316494552cc1acb036c5609e6400ac5b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d9bf7e6f64790700d3765c1935fa7eb13fa839a96617117bb226a8b86090fe9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83cef2539e49e0251b7cdab06b7a9002344f7ee41150f84b3f648372b4d50b55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0068e2981a8c781e9563c1714bf0be7f35b5510f1fd7e52ab4cc83a0c0ec0c0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resourc
e-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1a0dcfabc032353060f8cb94fc8c601da6b20a3e2c35b84229bf66c9256537\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:07Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.458171 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:07Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.470420 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98941020-2764-490d-81db-9decd21aaa13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mq524\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:07Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.483513 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/43cbb02f-aa8f-41aa-bd0a-8042aca3002c-multus-socket-dir-parent\") pod \"multus-qv7jl\" (UID: \"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\") " pod="openshift-multus/multus-qv7jl" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.483540 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-var-lib-openvswitch\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.483556 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-host-run-ovn-kubernetes\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.483581 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/43cbb02f-aa8f-41aa-bd0a-8042aca3002c-multus-daemon-config\") pod \"multus-qv7jl\" (UID: \"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\") " pod="openshift-multus/multus-qv7jl" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.483598 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/43cbb02f-aa8f-41aa-bd0a-8042aca3002c-host-run-netns\") pod \"multus-qv7jl\" (UID: \"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\") " pod="openshift-multus/multus-qv7jl" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.483612 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lxx7n\" (UniqueName: \"kubernetes.io/projected/4f739f0f-4546-4db5-809d-435823288bd8-kube-api-access-lxx7n\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.483626 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/27da1f1a-7a92-4c81-9889-21b44ea62db0-system-cni-dir\") pod \"multus-additional-cni-plugins-4g9zj\" (UID: \"27da1f1a-7a92-4c81-9889-21b44ea62db0\") " pod="openshift-multus/multus-additional-cni-plugins-4g9zj" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.483640 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/27da1f1a-7a92-4c81-9889-21b44ea62db0-tuning-conf-dir\") pod \"multus-additional-cni-plugins-4g9zj\" (UID: \"27da1f1a-7a92-4c81-9889-21b44ea62db0\") " pod="openshift-multus/multus-additional-cni-plugins-4g9zj" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.483667 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/43cbb02f-aa8f-41aa-bd0a-8042aca3002c-cni-binary-copy\") pod \"multus-qv7jl\" (UID: \"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\") " pod="openshift-multus/multus-qv7jl" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.483682 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/43cbb02f-aa8f-41aa-bd0a-8042aca3002c-host-var-lib-kubelet\") pod \"multus-qv7jl\" (UID: \"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\") " pod="openshift-multus/multus-qv7jl" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.483696 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-systemd-units\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.483710 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-host-cni-netd\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 
10:18:07.483723 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/43cbb02f-aa8f-41aa-bd0a-8042aca3002c-host-var-lib-cni-multus\") pod \"multus-qv7jl\" (UID: \"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\") " pod="openshift-multus/multus-qv7jl" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.483737 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/43cbb02f-aa8f-41aa-bd0a-8042aca3002c-etc-kubernetes\") pod \"multus-qv7jl\" (UID: \"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\") " pod="openshift-multus/multus-qv7jl" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.483751 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/27da1f1a-7a92-4c81-9889-21b44ea62db0-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-4g9zj\" (UID: \"27da1f1a-7a92-4c81-9889-21b44ea62db0\") " pod="openshift-multus/multus-additional-cni-plugins-4g9zj" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.483766 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-log-socket\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.483780 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.483795 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/27da1f1a-7a92-4c81-9889-21b44ea62db0-os-release\") pod \"multus-additional-cni-plugins-4g9zj\" (UID: \"27da1f1a-7a92-4c81-9889-21b44ea62db0\") " pod="openshift-multus/multus-additional-cni-plugins-4g9zj" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.483810 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/27da1f1a-7a92-4c81-9889-21b44ea62db0-cni-binary-copy\") pod \"multus-additional-cni-plugins-4g9zj\" (UID: \"27da1f1a-7a92-4c81-9889-21b44ea62db0\") " pod="openshift-multus/multus-additional-cni-plugins-4g9zj" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.483824 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/43cbb02f-aa8f-41aa-bd0a-8042aca3002c-multus-cni-dir\") pod \"multus-qv7jl\" (UID: \"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\") " pod="openshift-multus/multus-qv7jl" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.483840 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/43cbb02f-aa8f-41aa-bd0a-8042aca3002c-host-var-lib-cni-bin\") pod 
\"multus-qv7jl\" (UID: \"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\") " pod="openshift-multus/multus-qv7jl" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.483855 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-host-run-netns\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.483870 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/4f739f0f-4546-4db5-809d-435823288bd8-env-overrides\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.483884 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/27da1f1a-7a92-4c81-9889-21b44ea62db0-cnibin\") pod \"multus-additional-cni-plugins-4g9zj\" (UID: \"27da1f1a-7a92-4c81-9889-21b44ea62db0\") " pod="openshift-multus/multus-additional-cni-plugins-4g9zj" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.483899 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dllpp\" (UniqueName: \"kubernetes.io/projected/27da1f1a-7a92-4c81-9889-21b44ea62db0-kube-api-access-dllpp\") pod \"multus-additional-cni-plugins-4g9zj\" (UID: \"27da1f1a-7a92-4c81-9889-21b44ea62db0\") " pod="openshift-multus/multus-additional-cni-plugins-4g9zj" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.483918 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/43cbb02f-aa8f-41aa-bd0a-8042aca3002c-host-run-k8s-cni-cncf-io\") pod \"multus-qv7jl\" (UID: \"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\") " pod="openshift-multus/multus-qv7jl" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.483933 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-run-openvswitch\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.483947 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/4f739f0f-4546-4db5-809d-435823288bd8-ovnkube-script-lib\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.483962 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/43cbb02f-aa8f-41aa-bd0a-8042aca3002c-system-cni-dir\") pod \"multus-qv7jl\" (UID: \"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\") " pod="openshift-multus/multus-qv7jl" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.483975 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" 
(UniqueName: \"kubernetes.io/configmap/4f739f0f-4546-4db5-809d-435823288bd8-ovnkube-config\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.483998 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/43cbb02f-aa8f-41aa-bd0a-8042aca3002c-os-release\") pod \"multus-qv7jl\" (UID: \"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\") " pod="openshift-multus/multus-qv7jl" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.484012 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-run-systemd\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.484028 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-etc-openvswitch\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.484042 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-run-ovn\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.484057 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/4f739f0f-4546-4db5-809d-435823288bd8-ovn-node-metrics-cert\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.484126 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-host-cni-bin\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.484196 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/43cbb02f-aa8f-41aa-bd0a-8042aca3002c-hostroot\") pod \"multus-qv7jl\" (UID: \"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\") " pod="openshift-multus/multus-qv7jl" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.484217 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/43cbb02f-aa8f-41aa-bd0a-8042aca3002c-multus-conf-dir\") pod \"multus-qv7jl\" (UID: \"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\") " pod="openshift-multus/multus-qv7jl" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.484243 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4c2nm\" 
(UniqueName: \"kubernetes.io/projected/43cbb02f-aa8f-41aa-bd0a-8042aca3002c-kube-api-access-4c2nm\") pod \"multus-qv7jl\" (UID: \"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\") " pod="openshift-multus/multus-qv7jl" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.484271 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-host-slash\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.484295 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-host-kubelet\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.484358 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/43cbb02f-aa8f-41aa-bd0a-8042aca3002c-cnibin\") pod \"multus-qv7jl\" (UID: \"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\") " pod="openshift-multus/multus-qv7jl" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.484382 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/43cbb02f-aa8f-41aa-bd0a-8042aca3002c-host-run-multus-certs\") pod \"multus-qv7jl\" (UID: \"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\") " pod="openshift-multus/multus-qv7jl" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.484403 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-node-log\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.487232 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-qv7jl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4c2nm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-qv7jl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:07Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.503205 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:07Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.504281 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:18:07 crc kubenswrapper[4663]: E1211 10:18:07.504364 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.507492 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.508119 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.508775 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.509366 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.509887 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.511323 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.511992 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.512558 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.513537 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.513996 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.514818 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.515452 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.517248 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.517746 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" 
path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.520545 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:07Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.520883 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.521389 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.521898 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.522668 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" 
path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.523165 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.524095 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.524593 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.525117 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.526002 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.527671 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.528528 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.529102 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.530062 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.530538 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.531375 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.531802 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.532290 4663 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.532386 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" 
path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.534237 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.534912 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.535493 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.537412 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.538850 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.539501 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.540637 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.541361 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.542276 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.542945 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.543662 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8f5qw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7cf8637-c767-47eb-8d38-578340067566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-646rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8f5qw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:07Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.544134 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.545217 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.545732 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.546340 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.547313 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.548409 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.548934 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.549491 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.550507 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.551063 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.552322 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.553012 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.557742 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:07Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.567709 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"82107b1a-b827-4c4a-b133-ce7e8ed73963\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf6437a10b018ecd7f7c1a23eb664377818d86ae958710d078a245878cd7d5a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52c5a9fef9792d964d1b4505a0e5857652454e088fbc8293524f644afd044507\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"r
esource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed406469f2fa49f8100c4c2c4adf80a173b1e5c8ab8581a4dc8c4d93fd557c7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b28b0a79fd6ef3045b5417a53b9e665faea306044a08b64362f5f573221dea8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:07Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.576708 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-8f5qw" event={"ID":"a7cf8637-c767-47eb-8d38-578340067566","Type":"ContainerStarted","Data":"3440496709e41014f785c5aadeba1bae72c4b733779039f7508fa2ca78144537"} Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.576754 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-8f5qw" event={"ID":"a7cf8637-c767-47eb-8d38-578340067566","Type":"ContainerStarted","Data":"b269aee8fce68a872a12cbe6c9f937f574f24ba07073b8564723fe1e6a66c4b8"} Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.577916 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"d8fc3491f01b999c50e166f30aef0a1c9d95bc6935a4b68cdac9bfbb70bce980"} Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.577973 4663 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"591c293579c7c06a1b765fb2d7b977e1e77e1b4b473681409bffb67f3a8e8418"} Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.578447 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:07Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.578578 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"0ea3c38bfb57995847b835393a79bbc994237aeb8b12238113781a39c87318e8"} Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.579691 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.581082 4663 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" 
containerID="0068e2981a8c781e9563c1714bf0be7f35b5510f1fd7e52ab4cc83a0c0ec0c0e" exitCode=255 Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.581144 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"0068e2981a8c781e9563c1714bf0be7f35b5510f1fd7e52ab4cc83a0c0ec0c0e"} Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.581643 4663 scope.go:117] "RemoveContainer" containerID="0068e2981a8c781e9563c1714bf0be7f35b5510f1fd7e52ab4cc83a0c0ec0c0e" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.582609 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" event={"ID":"98941020-2764-490d-81db-9decd21aaa13","Type":"ContainerStarted","Data":"2a72fbeafe047b24c15eb96a00eacf7ff119486159cd9cf32f63af409aecc529"} Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.582642 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" event={"ID":"98941020-2764-490d-81db-9decd21aaa13","Type":"ContainerStarted","Data":"0ed2238a7f3178a2c325e0f77a94934af6c907c13d0340e7f4c67b744669b56b"} Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.582655 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" event={"ID":"98941020-2764-490d-81db-9decd21aaa13","Type":"ContainerStarted","Data":"0211d44501b196ff69aeafce132a978cb903ee2ff009d54c4a954ae6f5f5c1b5"} Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.584760 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/43cbb02f-aa8f-41aa-bd0a-8042aca3002c-multus-socket-dir-parent\") pod \"multus-qv7jl\" (UID: \"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\") " pod="openshift-multus/multus-qv7jl" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.584770 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"3dc8119f8882cfe2688a3b38b4746f6f9f75764080053fae1f19edd7880464e6"} Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.584790 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-var-lib-openvswitch\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.584810 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-host-run-ovn-kubernetes\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.584813 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"de08980167134ace33e7d6a626c4ee66e733e247e6e8d0f7e969105c12f41580"} Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.584826 4663 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"6fc9f176e366eb8d54b4a40166cab8472d248359dabf1f5271cbf6387f954bc8"} Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.584834 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/43cbb02f-aa8f-41aa-bd0a-8042aca3002c-multus-daemon-config\") pod \"multus-qv7jl\" (UID: \"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\") " pod="openshift-multus/multus-qv7jl" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.584883 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/43cbb02f-aa8f-41aa-bd0a-8042aca3002c-host-run-netns\") pod \"multus-qv7jl\" (UID: \"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\") " pod="openshift-multus/multus-qv7jl" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.584899 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-systemd-units\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.584919 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lxx7n\" (UniqueName: \"kubernetes.io/projected/4f739f0f-4546-4db5-809d-435823288bd8-kube-api-access-lxx7n\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.584950 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/27da1f1a-7a92-4c81-9889-21b44ea62db0-system-cni-dir\") pod \"multus-additional-cni-plugins-4g9zj\" (UID: \"27da1f1a-7a92-4c81-9889-21b44ea62db0\") " pod="openshift-multus/multus-additional-cni-plugins-4g9zj" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.584951 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-var-lib-openvswitch\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.584970 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/27da1f1a-7a92-4c81-9889-21b44ea62db0-tuning-conf-dir\") pod \"multus-additional-cni-plugins-4g9zj\" (UID: \"27da1f1a-7a92-4c81-9889-21b44ea62db0\") " pod="openshift-multus/multus-additional-cni-plugins-4g9zj" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.584986 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/43cbb02f-aa8f-41aa-bd0a-8042aca3002c-cni-binary-copy\") pod \"multus-qv7jl\" (UID: \"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\") " pod="openshift-multus/multus-qv7jl" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.584914 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-host-run-ovn-kubernetes\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.585058 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/43cbb02f-aa8f-41aa-bd0a-8042aca3002c-host-var-lib-kubelet\") pod \"multus-qv7jl\" (UID: \"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\") " pod="openshift-multus/multus-qv7jl" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.585034 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/43cbb02f-aa8f-41aa-bd0a-8042aca3002c-host-var-lib-kubelet\") pod \"multus-qv7jl\" (UID: \"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\") " pod="openshift-multus/multus-qv7jl" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.585110 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-host-cni-netd\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.585134 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/43cbb02f-aa8f-41aa-bd0a-8042aca3002c-host-var-lib-cni-multus\") pod \"multus-qv7jl\" (UID: \"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\") " pod="openshift-multus/multus-qv7jl" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.585142 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-systemd-units\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.585177 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/43cbb02f-aa8f-41aa-bd0a-8042aca3002c-etc-kubernetes\") pod \"multus-qv7jl\" (UID: \"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\") " pod="openshift-multus/multus-qv7jl" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.585218 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/43cbb02f-aa8f-41aa-bd0a-8042aca3002c-host-run-netns\") pod \"multus-qv7jl\" (UID: \"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\") " pod="openshift-multus/multus-qv7jl" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.585252 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/43cbb02f-aa8f-41aa-bd0a-8042aca3002c-multus-socket-dir-parent\") pod \"multus-qv7jl\" (UID: \"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\") " pod="openshift-multus/multus-qv7jl" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.585153 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/43cbb02f-aa8f-41aa-bd0a-8042aca3002c-etc-kubernetes\") pod \"multus-qv7jl\" (UID: \"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\") " pod="openshift-multus/multus-qv7jl" Dec 
11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.585318 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/43cbb02f-aa8f-41aa-bd0a-8042aca3002c-host-var-lib-cni-multus\") pod \"multus-qv7jl\" (UID: \"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\") " pod="openshift-multus/multus-qv7jl" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.585328 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/27da1f1a-7a92-4c81-9889-21b44ea62db0-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-4g9zj\" (UID: \"27da1f1a-7a92-4c81-9889-21b44ea62db0\") " pod="openshift-multus/multus-additional-cni-plugins-4g9zj" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.585229 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/27da1f1a-7a92-4c81-9889-21b44ea62db0-system-cni-dir\") pod \"multus-additional-cni-plugins-4g9zj\" (UID: \"27da1f1a-7a92-4c81-9889-21b44ea62db0\") " pod="openshift-multus/multus-additional-cni-plugins-4g9zj" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.585353 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-host-cni-netd\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.585413 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-log-socket\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.585494 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-log-socket\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.585656 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.585704 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/27da1f1a-7a92-4c81-9889-21b44ea62db0-tuning-conf-dir\") pod \"multus-additional-cni-plugins-4g9zj\" (UID: \"27da1f1a-7a92-4c81-9889-21b44ea62db0\") " pod="openshift-multus/multus-additional-cni-plugins-4g9zj" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.585775 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/43cbb02f-aa8f-41aa-bd0a-8042aca3002c-multus-daemon-config\") pod \"multus-qv7jl\" (UID: \"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\") " pod="openshift-multus/multus-qv7jl" Dec 11 10:18:07 crc 
kubenswrapper[4663]: I1211 10:18:07.585826 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/43cbb02f-aa8f-41aa-bd0a-8042aca3002c-cni-binary-copy\") pod \"multus-qv7jl\" (UID: \"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\") " pod="openshift-multus/multus-qv7jl" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.585856 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.585883 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/27da1f1a-7a92-4c81-9889-21b44ea62db0-os-release\") pod \"multus-additional-cni-plugins-4g9zj\" (UID: \"27da1f1a-7a92-4c81-9889-21b44ea62db0\") " pod="openshift-multus/multus-additional-cni-plugins-4g9zj" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.585936 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/27da1f1a-7a92-4c81-9889-21b44ea62db0-cni-binary-copy\") pod \"multus-additional-cni-plugins-4g9zj\" (UID: \"27da1f1a-7a92-4c81-9889-21b44ea62db0\") " pod="openshift-multus/multus-additional-cni-plugins-4g9zj" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.585962 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/43cbb02f-aa8f-41aa-bd0a-8042aca3002c-multus-cni-dir\") pod \"multus-qv7jl\" (UID: \"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\") " pod="openshift-multus/multus-qv7jl" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.585982 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/43cbb02f-aa8f-41aa-bd0a-8042aca3002c-host-var-lib-cni-bin\") pod \"multus-qv7jl\" (UID: \"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\") " pod="openshift-multus/multus-qv7jl" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.585998 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-host-run-netns\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.586014 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/4f739f0f-4546-4db5-809d-435823288bd8-env-overrides\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.586030 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/4f739f0f-4546-4db5-809d-435823288bd8-ovnkube-script-lib\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.586052 4663 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/27da1f1a-7a92-4c81-9889-21b44ea62db0-cnibin\") pod \"multus-additional-cni-plugins-4g9zj\" (UID: \"27da1f1a-7a92-4c81-9889-21b44ea62db0\") " pod="openshift-multus/multus-additional-cni-plugins-4g9zj" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.586073 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dllpp\" (UniqueName: \"kubernetes.io/projected/27da1f1a-7a92-4c81-9889-21b44ea62db0-kube-api-access-dllpp\") pod \"multus-additional-cni-plugins-4g9zj\" (UID: \"27da1f1a-7a92-4c81-9889-21b44ea62db0\") " pod="openshift-multus/multus-additional-cni-plugins-4g9zj" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.586094 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/43cbb02f-aa8f-41aa-bd0a-8042aca3002c-host-run-k8s-cni-cncf-io\") pod \"multus-qv7jl\" (UID: \"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\") " pod="openshift-multus/multus-qv7jl" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.586109 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/43cbb02f-aa8f-41aa-bd0a-8042aca3002c-host-var-lib-cni-bin\") pod \"multus-qv7jl\" (UID: \"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\") " pod="openshift-multus/multus-qv7jl" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.586130 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/43cbb02f-aa8f-41aa-bd0a-8042aca3002c-multus-cni-dir\") pod \"multus-qv7jl\" (UID: \"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\") " pod="openshift-multus/multus-qv7jl" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.586138 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-run-openvswitch\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.586114 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-run-openvswitch\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.586169 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/27da1f1a-7a92-4c81-9889-21b44ea62db0-cnibin\") pod \"multus-additional-cni-plugins-4g9zj\" (UID: \"27da1f1a-7a92-4c81-9889-21b44ea62db0\") " pod="openshift-multus/multus-additional-cni-plugins-4g9zj" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.586213 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/27da1f1a-7a92-4c81-9889-21b44ea62db0-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-4g9zj\" (UID: \"27da1f1a-7a92-4c81-9889-21b44ea62db0\") " pod="openshift-multus/multus-additional-cni-plugins-4g9zj" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.586222 4663 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/43cbb02f-aa8f-41aa-bd0a-8042aca3002c-system-cni-dir\") pod \"multus-qv7jl\" (UID: \"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\") " pod="openshift-multus/multus-qv7jl" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.586257 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/43cbb02f-aa8f-41aa-bd0a-8042aca3002c-system-cni-dir\") pod \"multus-qv7jl\" (UID: \"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\") " pod="openshift-multus/multus-qv7jl" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.586279 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-run-ovn\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.586307 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/4f739f0f-4546-4db5-809d-435823288bd8-ovnkube-config\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.586361 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/43cbb02f-aa8f-41aa-bd0a-8042aca3002c-os-release\") pod \"multus-qv7jl\" (UID: \"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\") " pod="openshift-multus/multus-qv7jl" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.586377 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-run-systemd\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.586397 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-etc-openvswitch\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.586411 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/4f739f0f-4546-4db5-809d-435823288bd8-ovn-node-metrics-cert\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.586433 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-host-slash\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.586456 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-host-cni-bin\") pod \"ovnkube-node-z7882\" (UID: 
\"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.586546 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/43cbb02f-aa8f-41aa-bd0a-8042aca3002c-hostroot\") pod \"multus-qv7jl\" (UID: \"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\") " pod="openshift-multus/multus-qv7jl" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.586567 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/43cbb02f-aa8f-41aa-bd0a-8042aca3002c-multus-conf-dir\") pod \"multus-qv7jl\" (UID: \"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\") " pod="openshift-multus/multus-qv7jl" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.586590 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4c2nm\" (UniqueName: \"kubernetes.io/projected/43cbb02f-aa8f-41aa-bd0a-8042aca3002c-kube-api-access-4c2nm\") pod \"multus-qv7jl\" (UID: \"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\") " pod="openshift-multus/multus-qv7jl" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.586610 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-host-kubelet\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.586633 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-etc-openvswitch\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.586642 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/43cbb02f-aa8f-41aa-bd0a-8042aca3002c-cnibin\") pod \"multus-qv7jl\" (UID: \"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\") " pod="openshift-multus/multus-qv7jl" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.586679 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/43cbb02f-aa8f-41aa-bd0a-8042aca3002c-cnibin\") pod \"multus-qv7jl\" (UID: \"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\") " pod="openshift-multus/multus-qv7jl" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.586691 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/43cbb02f-aa8f-41aa-bd0a-8042aca3002c-host-run-multus-certs\") pod \"multus-qv7jl\" (UID: \"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\") " pod="openshift-multus/multus-qv7jl" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.586712 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-node-log\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.586764 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: 
\"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-node-log\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.586789 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-run-ovn\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.586789 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/27da1f1a-7a92-4c81-9889-21b44ea62db0-cni-binary-copy\") pod \"multus-additional-cni-plugins-4g9zj\" (UID: \"27da1f1a-7a92-4c81-9889-21b44ea62db0\") " pod="openshift-multus/multus-additional-cni-plugins-4g9zj" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.586811 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-run-systemd\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.586029 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/27da1f1a-7a92-4c81-9889-21b44ea62db0-os-release\") pod \"multus-additional-cni-plugins-4g9zj\" (UID: \"27da1f1a-7a92-4c81-9889-21b44ea62db0\") " pod="openshift-multus/multus-additional-cni-plugins-4g9zj" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.586838 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/43cbb02f-aa8f-41aa-bd0a-8042aca3002c-os-release\") pod \"multus-qv7jl\" (UID: \"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\") " pod="openshift-multus/multus-qv7jl" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.586848 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/43cbb02f-aa8f-41aa-bd0a-8042aca3002c-host-run-multus-certs\") pod \"multus-qv7jl\" (UID: \"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\") " pod="openshift-multus/multus-qv7jl" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.586851 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/43cbb02f-aa8f-41aa-bd0a-8042aca3002c-hostroot\") pod \"multus-qv7jl\" (UID: \"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\") " pod="openshift-multus/multus-qv7jl" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.586865 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/4f739f0f-4546-4db5-809d-435823288bd8-env-overrides\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.586890 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-host-cni-bin\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 
10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.586066 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-host-run-netns\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.586930 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/43cbb02f-aa8f-41aa-bd0a-8042aca3002c-host-run-k8s-cni-cncf-io\") pod \"multus-qv7jl\" (UID: \"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\") " pod="openshift-multus/multus-qv7jl" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.586954 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-host-kubelet\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.586866 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-host-slash\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.586977 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/4f739f0f-4546-4db5-809d-435823288bd8-ovnkube-script-lib\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.587021 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/43cbb02f-aa8f-41aa-bd0a-8042aca3002c-multus-conf-dir\") pod \"multus-qv7jl\" (UID: \"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\") " pod="openshift-multus/multus-qv7jl" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.587410 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/4f739f0f-4546-4db5-809d-435823288bd8-ovnkube-config\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.591539 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready 
status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:07Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.593267 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/4f739f0f-4546-4db5-809d-435823288bd8-ovn-node-metrics-cert\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.602905 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lxx7n\" (UniqueName: \"kubernetes.io/projected/4f739f0f-4546-4db5-809d-435823288bd8-kube-api-access-lxx7n\") pod \"ovnkube-node-z7882\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.603008 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4c2nm\" (UniqueName: \"kubernetes.io/projected/43cbb02f-aa8f-41aa-bd0a-8042aca3002c-kube-api-access-4c2nm\") pod \"multus-qv7jl\" (UID: \"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\") " pod="openshift-multus/multus-qv7jl" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.603385 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dllpp\" (UniqueName: \"kubernetes.io/projected/27da1f1a-7a92-4c81-9889-21b44ea62db0-kube-api-access-dllpp\") pod \"multus-additional-cni-plugins-4g9zj\" (UID: \"27da1f1a-7a92-4c81-9889-21b44ea62db0\") " pod="openshift-multus/multus-additional-cni-plugins-4g9zj" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.608128 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b934597e-ac43-4d21-82b1-c8d4260800e6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba7f98a60c12531c6766887ffac909316494552cc1acb036c5609e6400ac5b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d9bf7e6f64790700d3765c1935fa7eb13fa839a96617117bb226a8b86090fe9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83cef2539e49e0251b7cdab06b7a9002344f7ee41150f84b3f648372b4d50b55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0068e2981a8c781e9563c1714bf0be7f35b5510f1fd7e52ab4cc83a0c0ec0c0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0068e2981a8c781e9563c1714bf0be7f35b5510f1fd7e52ab4cc83a0c0ec0c0e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"le observer\\\\nW1211 10:18:06.233120 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1211 10:18:06.233258 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 10:18:06.234798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2924789290/tls.crt::/tmp/serving-cert-2924789290/tls.key\\\\\\\"\\\\nI1211 10:18:06.570830 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 10:18:06.573023 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 10:18:06.573049 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 10:18:06.573076 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 10:18:06.573081 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 10:18:06.580393 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1211 10:18:06.580415 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580420 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580424 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 10:18:06.580426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 10:18:06.580429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 10:18:06.580432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1211 10:18:06.580522 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1211 10:18:06.581348 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1a0dcfabc032353060f8cb94fc8c601da6b20a3e2c35b84229bf66c9256537\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:07Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.617429 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:07Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.630829 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f739f0f-4546-4db5-809d-435823288bd8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-z7882\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:07Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.639847 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-qv7jl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4c2nm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-qv7jl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:07Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.651583 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dc8119f8882cfe2688a3b38b4746f6f9f75764080053fae1f19edd7880464e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de08980167134ace33e7d6a626c4ee66e733e247e6e8d0f7e969105c12f41580\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:07Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:07 crc 
kubenswrapper[4663]: I1211 10:18:07.661431 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:07Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.688060 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8f5qw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7cf8637-c767-47eb-8d38-578340067566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3440496709e41014f785c5aadeba1bae72c4b733779039f7508fa2ca78144537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-646rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8f5qw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:07Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.710174 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"98941020-2764-490d-81db-9decd21aaa13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a72fbeafe047b24c15eb96a00eacf7ff119486159cd9cf32f63af409aecc529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed2238a7f3178a2c325e0f77a94934af6c907c13d0340e7f4c67b744669b56b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mq524\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:07Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.730236 4663 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4g9zj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27da1f1a-7a92-4c81-9889-21b44ea62db0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting
\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4g9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:07Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.737931 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-qv7jl" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.746059 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"82107b1a-b827-4c4a-b133-ce7e8ed73963\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf6437a10b018ecd7f7c1a23eb664377818d86ae958710d078a245878cd7d5a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52c5a9fef9792d964d1b4505a0e5857652454e088fbc8293524f644afd044507\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"sta
te\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed406469f2fa49f8100c4c2c4adf80a173b1e5c8ab8581a4dc8c4d93fd557c7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b28b0a79fd6ef3045b5417a53b9e665faea306044a08b64362f5f573221dea8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:07Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.760827 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.765560 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-4g9zj" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.771669 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8fc3491f01b999c50e166f30aef0a1c9d95bc6935a4b68cdac9bfbb70bce980\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:07Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.783089 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:07Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:07 crc kubenswrapper[4663]: W1211 10:18:07.792812 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4f739f0f_4546_4db5_809d_435823288bd8.slice/crio-b536d6d362cb3ab7fc77a155b44aba445d87975bf29d5587132739459136a578 WatchSource:0}: Error finding container b536d6d362cb3ab7fc77a155b44aba445d87975bf29d5587132739459136a578: Status 404 returned error can't find the container with id b536d6d362cb3ab7fc77a155b44aba445d87975bf29d5587132739459136a578 Dec 11 10:18:07 crc kubenswrapper[4663]: I1211 10:18:07.795425 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:07Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:08 crc kubenswrapper[4663]: I1211 10:18:08.194885 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 10:18:08 crc kubenswrapper[4663]: E1211 10:18:08.195065 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 10:18:10.195041349 +0000 UTC m=+21.012971133 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 10:18:08 crc kubenswrapper[4663]: I1211 10:18:08.195343 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:18:08 crc kubenswrapper[4663]: I1211 10:18:08.195372 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:18:08 crc kubenswrapper[4663]: I1211 10:18:08.195396 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:18:08 crc kubenswrapper[4663]: E1211 
10:18:08.195489 4663 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 10:18:08 crc kubenswrapper[4663]: E1211 10:18:08.195523 4663 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 10:18:08 crc kubenswrapper[4663]: E1211 10:18:08.195537 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 10:18:10.195529294 +0000 UTC m=+21.013459078 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 10:18:08 crc kubenswrapper[4663]: E1211 10:18:08.195568 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 10:18:10.195556695 +0000 UTC m=+21.013486479 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 10:18:08 crc kubenswrapper[4663]: E1211 10:18:08.195620 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 10:18:08 crc kubenswrapper[4663]: E1211 10:18:08.195660 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 10:18:08 crc kubenswrapper[4663]: E1211 10:18:08.195677 4663 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 10:18:08 crc kubenswrapper[4663]: E1211 10:18:08.195750 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-11 10:18:10.195729322 +0000 UTC m=+21.013659105 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 10:18:08 crc kubenswrapper[4663]: I1211 10:18:08.296487 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:18:08 crc kubenswrapper[4663]: E1211 10:18:08.296666 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 10:18:08 crc kubenswrapper[4663]: E1211 10:18:08.296705 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 10:18:08 crc kubenswrapper[4663]: E1211 10:18:08.296719 4663 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 10:18:08 crc kubenswrapper[4663]: E1211 10:18:08.296787 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-11 10:18:10.296770234 +0000 UTC m=+21.114700018 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 10:18:08 crc kubenswrapper[4663]: I1211 10:18:08.503716 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:18:08 crc kubenswrapper[4663]: I1211 10:18:08.503820 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:18:08 crc kubenswrapper[4663]: E1211 10:18:08.503869 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 10:18:08 crc kubenswrapper[4663]: E1211 10:18:08.503951 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 10:18:08 crc kubenswrapper[4663]: I1211 10:18:08.592028 4663 generic.go:334] "Generic (PLEG): container finished" podID="27da1f1a-7a92-4c81-9889-21b44ea62db0" containerID="9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77" exitCode=0 Dec 11 10:18:08 crc kubenswrapper[4663]: I1211 10:18:08.592119 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-4g9zj" event={"ID":"27da1f1a-7a92-4c81-9889-21b44ea62db0","Type":"ContainerDied","Data":"9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77"} Dec 11 10:18:08 crc kubenswrapper[4663]: I1211 10:18:08.592199 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-4g9zj" event={"ID":"27da1f1a-7a92-4c81-9889-21b44ea62db0","Type":"ContainerStarted","Data":"82ecb097ac6760b3d017d39dda24073c098f630ffdb26d5bb977811c4307136b"} Dec 11 10:18:08 crc kubenswrapper[4663]: I1211 10:18:08.594163 4663 generic.go:334] "Generic (PLEG): container finished" podID="4f739f0f-4546-4db5-809d-435823288bd8" containerID="848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022" exitCode=0 Dec 11 10:18:08 crc kubenswrapper[4663]: I1211 10:18:08.594273 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" event={"ID":"4f739f0f-4546-4db5-809d-435823288bd8","Type":"ContainerDied","Data":"848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022"} Dec 11 10:18:08 crc kubenswrapper[4663]: I1211 10:18:08.594313 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" event={"ID":"4f739f0f-4546-4db5-809d-435823288bd8","Type":"ContainerStarted","Data":"b536d6d362cb3ab7fc77a155b44aba445d87975bf29d5587132739459136a578"} Dec 11 10:18:08 crc kubenswrapper[4663]: I1211 10:18:08.597054 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-qv7jl" event={"ID":"43cbb02f-aa8f-41aa-bd0a-8042aca3002c","Type":"ContainerStarted","Data":"fb85226c32cc03f4667a27f4596ce501400b323c88b347baab7574fe8f9e056c"} Dec 11 10:18:08 crc kubenswrapper[4663]: I1211 10:18:08.597087 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-qv7jl" event={"ID":"43cbb02f-aa8f-41aa-bd0a-8042aca3002c","Type":"ContainerStarted","Data":"5cb09e456e7f1d6e04c3374d1458e4056f43929d4f6d82ac5f94fd0f4a9438fe"} Dec 11 10:18:08 crc kubenswrapper[4663]: I1211 10:18:08.599274 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 11 10:18:08 crc kubenswrapper[4663]: I1211 10:18:08.601834 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"a89c5b395c04b94c4476db7bf1edcfe175fb552dbbf4145a7031c0f86e139e9f"} Dec 11 10:18:08 crc kubenswrapper[4663]: I1211 10:18:08.602172 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 10:18:08 crc kubenswrapper[4663]: I1211 10:18:08.605283 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dc8119f8882cfe2688a3b38b4746f6f9f75764080053fae1f19edd7880464e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de08980167134ace33e7d6a626c4ee66e733e247e6e8d0f7e969105c12f41580\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet 
valid: current time 2025-12-11T10:18:08Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:08 crc kubenswrapper[4663]: I1211 10:18:08.620396 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:08Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:08 crc kubenswrapper[4663]: I1211 10:18:08.634984 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8f5qw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7cf8637-c767-47eb-8d38-578340067566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3440496709e41014f785c5aadeba1bae72c4b733779039f7508fa2ca78144537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-646rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8f5qw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:08Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:08 crc kubenswrapper[4663]: I1211 10:18:08.646379 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"98941020-2764-490d-81db-9decd21aaa13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a72fbeafe047b24c15eb96a00eacf7ff119486159cd9cf32f63af409aecc529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed2238a7f3178a2c325e0f77a94934af6c907c13d0340e7f4c67b744669b56b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mq524\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:08Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:08 crc kubenswrapper[4663]: I1211 10:18:08.664608 4663 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-qv7jl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4c2nm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-qv7jl\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:08Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:08 crc kubenswrapper[4663]: I1211 10:18:08.679304 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"82107b1a-b827-4c4a-b133-ce7e8ed73963\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf6437a10b018ecd7f7c1a23eb664377818d86ae958710d078a245878cd7d5a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52c5a9fef9792d964d1b4505a0e5857652454e088fbc8293524f644afd044507\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed406469f2fa49f8100c4c2c4adf80a173b1e5c8ab8581a4dc8c4d93fd557c7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b28b0a79fd6ef3045b5417a53b9e665faea306044a08b64362f5f573221dea8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:08Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:08 crc kubenswrapper[4663]: I1211 10:18:08.709731 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8fc3491f01b999c50e166f30aef0a1c9d95bc6935a4b68cdac9bfbb70bce980\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:08Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:08 crc kubenswrapper[4663]: I1211 10:18:08.729060 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:08Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:08 crc kubenswrapper[4663]: I1211 10:18:08.741338 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:08Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:08 crc kubenswrapper[4663]: I1211 10:18:08.753580 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4g9zj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27da1f1a-7a92-4c81-9889-21b44ea62db0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4g9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:08Z is after 2025-08-24T17:21:41Z"
Dec 11 10:18:08 crc kubenswrapper[4663]: I1211 10:18:08.764151    4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b934597e-ac43-4d21-82b1-c8d4260800e6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba7f98a60c12531c6766887ffac909316494552cc1acb036c5609e6400ac5b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d9bf7e6f64790700d3765c1935fa7eb13fa839a96617117bb226a8b86090fe9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83cef2539e49e0251b7cdab06b7a9002344f7ee41150f84b3f648372b4d50b55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0068e2981a8c781e9563c1714bf0be7f35b5510f1fd7e52ab4cc83a0c0ec0c0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0068e2981a8c781e9563c1714bf0be7f35b5510f1fd7e52ab4cc83a0c0ec0c0e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"le observer\\\\nW1211 10:18:06.233120       1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1211 10:18:06.233258       1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 10:18:06.234798       1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2924789290/tls.crt::/tmp/serving-cert-2924789290/tls.key\\\\\\\"\\\\nI1211 10:18:06.570830       1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 10:18:06.573023       1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 10:18:06.573049       1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 10:18:06.573076       1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 10:18:06.573081       1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 10:18:06.580393       1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1211 10:18:06.580415       1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580420       1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580424       1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 10:18:06.580426       1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 10:18:06.580429       1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 10:18:06.580432       1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1211 10:18:06.580522       1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1211 10:18:06.581348       1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1a0dcfabc032353060f8cb94fc8c601da6b20a3e2c35b84229bf66c9256537\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:08Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:08 crc kubenswrapper[4663]: I1211 10:18:08.777254 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:08Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:08 crc kubenswrapper[4663]: I1211 10:18:08.794419 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f739f0f-4546-4db5-809d-435823288bd8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-z7882\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:08Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:08 crc kubenswrapper[4663]: I1211 10:18:08.806107 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b934597e-ac43-4d21-82b1-c8d4260800e6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba7f98a60c12531c6766887ffac909316494552cc1acb036c5609e6400ac5b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d9bf7e6f64790700d3765c1935fa7eb13fa839a96617117bb226a8b86090fe9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83cef2539e49e0251b7cdab06b7a9002344f7ee41150f84b3f648372b4d50b55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a89c5b395c04b94c4476db7bf1edcfe175fb552dbbf4145a7031c0f86e139e9f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0068e2981a8c781e9563c1714bf0be7f35b5510f1fd7e52ab4cc83a0c0ec0c0e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"le observer\\\\nW1211 10:18:06.233120 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1211 10:18:06.233258 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 10:18:06.234798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2924789290/tls.crt::/tmp/serving-cert-2924789290/tls.key\\\\\\\"\\\\nI1211 10:18:06.570830 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 10:18:06.573023 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 10:18:06.573049 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 10:18:06.573076 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 10:18:06.573081 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 10:18:06.580393 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1211 10:18:06.580415 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580420 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580424 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 10:18:06.580426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 10:18:06.580429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 10:18:06.580432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1211 10:18:06.580522 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1211 10:18:06.581348 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1a0dcfabc032353060f8cb94fc8c601da6b20a3e2c35b84229bf66c9256537\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:08Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:08 crc kubenswrapper[4663]: I1211 10:18:08.814930 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:08Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:08 crc kubenswrapper[4663]: I1211 10:18:08.828757 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f739f0f-4546-4db5-809d-435823288bd8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-z7882\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:08Z 
is after 2025-08-24T17:21:41Z" Dec 11 10:18:08 crc kubenswrapper[4663]: I1211 10:18:08.838708 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dc8119f8882cfe2688a3b38b4746f6f9f75764080053fae1f19edd7880464e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de08980167134ace33e7d6a626c4ee66e733e247e6e8d0f7e969105c12f41580\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:08Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:08 crc kubenswrapper[4663]: I1211 10:18:08.851739 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:08Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:08 crc kubenswrapper[4663]: I1211 10:18:08.860272 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8f5qw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7cf8637-c767-47eb-8d38-578340067566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3440496709e41014f785c5aadeba1bae72c4b733779039f7508fa2ca78144537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-646rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8f5qw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:08Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:08 crc kubenswrapper[4663]: I1211 10:18:08.869037 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"98941020-2764-490d-81db-9decd21aaa13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a72fbeafe047b24c15eb96a00eacf7ff119486159cd9cf32f63af409aecc529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed2238a7f3178a2c325e0f77a94934af6c907c13d0340e7f4c67b744669b56b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mq524\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:08Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:08 crc kubenswrapper[4663]: I1211 10:18:08.879943 4663 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-qv7jl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb85226c32cc03f4667a27f4596ce501400b323c88b347baab7574fe8f9e056c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4c2nm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-qv7jl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:08Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:08 crc kubenswrapper[4663]: I1211 10:18:08.889634 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"82107b1a-b827-4c4a-b133-ce7e8ed73963\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf6437a10b018ecd7f7c1a23eb664377818d86ae958710d078a245878cd7d5a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52c5a9fef9792d964d1b4505a0e5857652454e088fbc8293524f644afd044507\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed406469f2fa49f8100c4c2c4adf80a173b1e5c8ab8581a4dc8c4d93fd557c7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"k
ube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b28b0a79fd6ef3045b5417a53b9e665faea306044a08b64362f5f573221dea8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:08Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:08 crc kubenswrapper[4663]: I1211 10:18:08.901360 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8fc3491f01b999c50e166f30aef0a1c9d95bc6935a4b68cdac9bfbb70bce980\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:08Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:08 crc kubenswrapper[4663]: I1211 10:18:08.911349 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:08Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:08 crc kubenswrapper[4663]: I1211 10:18:08.921512 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:08Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:08 crc kubenswrapper[4663]: I1211 10:18:08.933025 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4g9zj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27da1f1a-7a92-4c81-9889-21b44ea62db0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4g9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:08Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:09 crc 
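All of the status patches above fail identically: the admission webhook pod.network-node-identity.openshift.io at https://127.0.0.1:9743 presents a serving certificate that expired on 2025-08-24T17:21:41Z, while the node clock reads 2025-12-11T10:18:08Z. Whether the certificate is genuinely stale or the clock has jumped past it can be confirmed by probing the endpoint directly. The sketch below is illustrative rather than part of the log: it assumes Python with the third-party cryptography package available on the node, and takes the host and port from the failing webhook URL.

```python
# Illustrative probe (assumption: run on the node; `pip install cryptography`).
# Fetches the webhook's serving certificate without chain verification, so an
# already-expired certificate can still be inspected, then compares NotAfter
# with the current time, i.e. the same check that produced the x509 error above.
import socket
import ssl
from datetime import datetime, timezone

from cryptography import x509

HOST, PORT = "127.0.0.1", 9743  # from Post "https://127.0.0.1:9743/pod?timeout=10s"

def fetch_not_after(host: str, port: int) -> datetime:
    ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    ctx.check_hostname = False          # disable hostname checking first...
    ctx.verify_mode = ssl.CERT_NONE     # ...then skip chain verification
    with socket.create_connection((host, port), timeout=5) as sock:
        with ctx.wrap_socket(sock, server_hostname=host) as tls:
            der = tls.getpeercert(binary_form=True)
    cert = x509.load_der_x509_certificate(der)
    # not_valid_after_utc exists on newer `cryptography`; fall back otherwise.
    not_after = getattr(cert, "not_valid_after_utc", None)
    return not_after or cert.not_valid_after.replace(tzinfo=timezone.utc)

if __name__ == "__main__":
    not_after = fetch_not_after(HOST, PORT)
    now = datetime.now(timezone.utc)
    # Expected here: NotAfter=2025-08-24T17:21:41Z, with `now` well past it.
    print(f"NotAfter={not_after.isoformat()} now={now.isoformat()} expired={now > not_after}")
```

If NotAfter matches the 2025-08-24 date in the errors, the webhook's serving certificate is stale and needs rotation; if it lies in the future, the node clock is the problem.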
kubenswrapper[4663]: I1211 10:18:09.386626 4663 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials"
Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.426142 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.427930 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.427970 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.427981 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.428097 4663 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.433238 4663 kubelet_node_status.go:115] "Node was previously registered" node="crc"
Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.433472 4663 kubelet_node_status.go:79] "Successfully registered node" node="crc"
Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.435914 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.435972 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.436527 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.436547 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.436559 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:09Z","lastTransitionTime":"2025-12-11T10:18:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:09 crc kubenswrapper[4663]: E1211 10:18:09.446835 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a09a2675-5943-46f1-b4c2-67ff05e820a6\\\",\\\"systemUUID\\\":\\\"18e040fd-bd85-4c98-8f53-60800a512169\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:09Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.449823 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.449852 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.449861 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.449876 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.449887 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:09Z","lastTransitionTime":"2025-12-11T10:18:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:09 crc kubenswrapper[4663]: E1211 10:18:09.460943 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a09a2675-5943-46f1-b4c2-67ff05e820a6\\\",\\\"systemUUID\\\":\\\"18e040fd-bd85-4c98-8f53-60800a512169\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:09Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.463522 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.463553 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
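The node-status payload above carries no new information on its second appearance: within one second the kubelet logs the same failing PATCH at 10:18:09.446835 and 10:18:09.460943, and it will appear twice more below. That is the kubelet's bounded retry loop for node status updates; each sync attempts the PATCH several times before surfacing a final error. A minimal sketch of that shape follows (illustrative, not kubelet source; the constant is an assumption mirroring kubelet's nodeStatusUpdateRetry, and RuntimeError stands in for the webhook/TLS failure above).

```python
# Illustrative retry shape (not kubelet source). Each failed attempt is what
# shows up in the log as "Error updating node status, will retry"; only after
# the last attempt does the kubelet give up on this sync iteration.
NODE_STATUS_UPDATE_RETRY = 5  # assumption: mirrors kubelet's nodeStatusUpdateRetry

def update_node_status(patch_once) -> None:
    last_err = None
    for attempt in range(1, NODE_STATUS_UPDATE_RETRY + 1):
        try:
            patch_once()  # the PATCH that the expired-cert webhook keeps rejecting
            return
        except RuntimeError as err:
            last_err = err
            print(f"Error updating node status, will retry (attempt {attempt}): {err}")
    raise RuntimeError(f"node status update exceeded retry count: {last_err}")
```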
event="NodeHasNoDiskPressure" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.463562 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.463572 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.463581 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:09Z","lastTransitionTime":"2025-12-11T10:18:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:09 crc kubenswrapper[4663]: E1211 10:18:09.471537 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a09a2675-5943-46f1-b4c2-67ff05e820a6\\\",\\\"systemUUID\\\":\\\"18e040fd-bd85-4c98-8f53-60800a512169\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:09Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.474087 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.474115 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.474124 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.474134 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.474142 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:09Z","lastTransitionTime":"2025-12-11T10:18:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:09 crc kubenswrapper[4663]: E1211 10:18:09.482287 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a09a2675-5943-46f1-b4c2-67ff05e820a6\\\",\\\"systemUUID\\\":\\\"18e040fd-bd85-4c98-8f53-60800a512169\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:09Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.485116 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.485148 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.485158 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.485220 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.485233 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:09Z","lastTransitionTime":"2025-12-11T10:18:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:09 crc kubenswrapper[4663]: E1211 10:18:09.493534 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a09a2675-5943-46f1-b4c2-67ff05e820a6\\\",\\\"systemUUID\\\":\\\"18e040fd-bd85-4c98-8f53-60800a512169\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:09Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:09 crc kubenswrapper[4663]: E1211 10:18:09.493649 4663 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.494970 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.495001 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.495015 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.495032 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.495041 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:09Z","lastTransitionTime":"2025-12-11T10:18:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.504792 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:18:09 crc kubenswrapper[4663]: E1211 10:18:09.504960 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.516847 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-qv7jl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb85226c32cc03f4667a27f4596ce501400b323c88b347baab7574fe8f9e056c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4c2nm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-qv7jl\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:09Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.528197 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dc8119f8882cfe2688a3b38b4746f6f9f75764080053fae1f19edd7880464e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de08980167134ace33e7d6a626c4ee66e733e247e6e8d0f7e969105c12f41580\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:09Z is after 
2025-08-24T17:21:41Z" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.538731 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:09Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.548838 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8f5qw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7cf8637-c767-47eb-8d38-578340067566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3440496709e41014f785c5aadeba1bae72c4b733779039f7508fa2ca78144537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-646rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8f5qw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:09Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.558444 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"98941020-2764-490d-81db-9decd21aaa13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a72fbeafe047b24c15eb96a00eacf7ff119486159cd9cf32f63af409aecc529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed2238a7f3178a2c325e0f77a94934af6c907c13d0340e7f4c67b744669b56b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mq524\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:09Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.572617 4663 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4g9zj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27da1f1a-7a92-4c81-9889-21b44ea62db0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-ap
i-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4g9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:09Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.587247 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"82107b1a-b827-4c4a-b133-ce7e8ed73963\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf6437a10b018ecd7f7c1a23eb664377818d86ae958710d078a245878cd7d5a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52c5a9fef9792d964d1b4505a0e5857652454e088fbc8293524f644afd044507\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/o
cp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed406469f2fa49f8100c4c2c4adf80a173b1e5c8ab8581a4dc8c4d93fd557c7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b28b0a79fd6ef3045b5417a53b9e665faea306044a08b64362f5f573221dea8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:09Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.596339 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.596380 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.596390 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.596405 4663 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeNotReady" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.596415 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:09Z","lastTransitionTime":"2025-12-11T10:18:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.597021 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8fc3491f01b999c50e166f30aef0a1c9d95bc6935a4b68cdac9bfbb70bce980\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:09Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.605704 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:09Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.607515 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" event={"ID":"4f739f0f-4546-4db5-809d-435823288bd8","Type":"ContainerStarted","Data":"f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15"} Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.607548 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" event={"ID":"4f739f0f-4546-4db5-809d-435823288bd8","Type":"ContainerStarted","Data":"84fd59d33b64d1211bfe2754377534ed00ca078356eeb674fcd5d2d25761500b"} Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.607560 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" event={"ID":"4f739f0f-4546-4db5-809d-435823288bd8","Type":"ContainerStarted","Data":"ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e"} Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.607571 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" event={"ID":"4f739f0f-4546-4db5-809d-435823288bd8","Type":"ContainerStarted","Data":"1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522"} Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.607580 4663 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" event={"ID":"4f739f0f-4546-4db5-809d-435823288bd8","Type":"ContainerStarted","Data":"430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729"} Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.607590 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" event={"ID":"4f739f0f-4546-4db5-809d-435823288bd8","Type":"ContainerStarted","Data":"fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024"} Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.608946 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"af9e980e59ce0e172d37122fdae971fb0123940795827553f646ae1a6283f523"} Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.610497 4663 generic.go:334] "Generic (PLEG): container finished" podID="27da1f1a-7a92-4c81-9889-21b44ea62db0" containerID="9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c" exitCode=0 Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.610566 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-4g9zj" event={"ID":"27da1f1a-7a92-4c81-9889-21b44ea62db0","Type":"ContainerDied","Data":"9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c"} Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.615714 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:09Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.626571 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b934597e-ac43-4d21-82b1-c8d4260800e6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba7f98a60c12531c6766887ffac909316494552cc1acb036c5609e6400ac5b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d9bf7e6f64790700d3765c1935fa7eb13fa839a96617117bb226a8b86090fe9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83cef2539e49e0251b7cdab06b7a9002344f7ee41150f84b3f648372b4d50b55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a89c5b395c04b94c4476db7bf1edcfe175fb552dbbf4145a7031c0f86e139e9f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0068e2981a8c781e9563c1714bf0be7f35b5510f1fd7e52ab4cc83a0c0ec0c0e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"le observer\\\\nW1211 10:18:06.233120 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1211 10:18:06.233258 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 10:18:06.234798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2924789290/tls.crt::/tmp/serving-cert-2924789290/tls.key\\\\\\\"\\\\nI1211 10:18:06.570830 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 10:18:06.573023 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 10:18:06.573049 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 10:18:06.573076 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 10:18:06.573081 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 10:18:06.580393 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1211 10:18:06.580415 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580420 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580424 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 10:18:06.580426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 10:18:06.580429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 10:18:06.580432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1211 10:18:06.580522 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1211 10:18:06.581348 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1a0dcfabc032353060f8cb94fc8c601da6b20a3e2c35b84229bf66c9256537\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:09Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.628143 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-8lh4c"] Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.628546 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-8lh4c" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.630385 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.630539 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.630583 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.630710 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.636243 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:09Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.649360 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f739f0f-4546-4db5-809d-435823288bd8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-z7882\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:09Z 
is after 2025-08-24T17:21:41Z" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.656998 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8lh4c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0724e8ea-646f-422e-bc21-39189bddba17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4cn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8lh4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:09Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.666147 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"82107b1a-b827-4c4a-b133-ce7e8ed73963\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf6437a10b018ecd7f7c1a23eb664377818d86ae958710d078a245878cd7d5a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52c5a9fef9792d964d1b4505a0e5857652454e088fbc8293524f644afd044507\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed406469f2fa49f8100c4c2c4adf80a173b1e5c8ab8581a4dc8c4d93fd557c7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b28b0a79fd6ef3045b5417a53b9e665faea306044a08b64362f5f573221dea8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:09Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.676209 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8fc3491f01b999c50e166f30aef0a1c9d95bc6935a4b68cdac9bfbb70bce980\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:09Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.690265 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:09Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.699918 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.699952 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.699961 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.699979 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.699991 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:09Z","lastTransitionTime":"2025-12-11T10:18:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.701174 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:09Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.710148 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/0724e8ea-646f-422e-bc21-39189bddba17-serviceca\") pod \"node-ca-8lh4c\" (UID: \"0724e8ea-646f-422e-bc21-39189bddba17\") " pod="openshift-image-registry/node-ca-8lh4c" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.710213 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0724e8ea-646f-422e-bc21-39189bddba17-host\") pod \"node-ca-8lh4c\" (UID: \"0724e8ea-646f-422e-bc21-39189bddba17\") " pod="openshift-image-registry/node-ca-8lh4c" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.710234 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s4cn4\" (UniqueName: 
\"kubernetes.io/projected/0724e8ea-646f-422e-bc21-39189bddba17-kube-api-access-s4cn4\") pod \"node-ca-8lh4c\" (UID: \"0724e8ea-646f-422e-bc21-39189bddba17\") " pod="openshift-image-registry/node-ca-8lh4c" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.712130 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4g9zj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27da1f1a-7a92-4c81-9889-21b44ea62db0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/
etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-
dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4g9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:09Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.722945 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b934597e-ac43-4d21-82b1-c8d4260800e6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba7f98a60c12531c6766887ffac909316494552cc1acb036c5609e6400ac5b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d9bf7e6f64790700d3765c1935fa7eb13fa839a96617117bb226a8b86090fe9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83cef2539e49e0251b7cdab06b7a9002344f7ee41150f84b3f648372b4d50b55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a89c5b395c04b94c4476db7bf1edcfe175fb552dbbf4145a7031c0f86e139e9f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0068e2981a8c781e9563c1714bf0be7f35b5510f1fd7e52ab4cc83a0c0ec0c0e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"le observer\\\\nW1211 10:18:06.233120 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1211 10:18:06.233258 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 10:18:06.234798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2924789290/tls.crt::/tmp/serving-cert-2924789290/tls.key\\\\\\\"\\\\nI1211 10:18:06.570830 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 10:18:06.573023 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 10:18:06.573049 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 10:18:06.573076 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 10:18:06.573081 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 10:18:06.580393 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1211 10:18:06.580415 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580420 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580424 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 10:18:06.580426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 10:18:06.580429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 10:18:06.580432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1211 10:18:06.580522 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1211 10:18:06.581348 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1a0dcfabc032353060f8cb94fc8c601da6b20a3e2c35b84229bf66c9256537\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:09Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.732891 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9e980e59ce0e172d37122fdae971fb0123940795827553f646ae1a6283f523\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:09Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.748416 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f739f0f-4546-4db5-809d-435823288bd8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-z7882\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:09Z 
is after 2025-08-24T17:21:41Z" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.757801 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dc8119f8882cfe2688a3b38b4746f6f9f75764080053fae1f19edd7880464e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de08980167134ace33e7d6a626c4ee66e733e247e6e8d0f7e969105c12f41580\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:09Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.766212 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:09Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.775445 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8f5qw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7cf8637-c767-47eb-8d38-578340067566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3440496709e41014f785c5aadeba1bae72c4b733779039f7508fa2ca78144537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-646rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8f5qw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:09Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.783507 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"98941020-2764-490d-81db-9decd21aaa13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a72fbeafe047b24c15eb96a00eacf7ff119486159cd9cf32f63af409aecc529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed2238a7f3178a2c325e0f77a94934af6c907c13d0340e7f4c67b744669b56b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mq524\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:09Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.795030 4663 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-qv7jl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb85226c32cc03f4667a27f4596ce501400b323c88b347baab7574fe8f9e056c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4c2nm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-qv7jl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:09Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.803776 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.803859 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.803873 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.803900 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.803913 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:09Z","lastTransitionTime":"2025-12-11T10:18:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.811782 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/0724e8ea-646f-422e-bc21-39189bddba17-serviceca\") pod \"node-ca-8lh4c\" (UID: \"0724e8ea-646f-422e-bc21-39189bddba17\") " pod="openshift-image-registry/node-ca-8lh4c" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.812043 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0724e8ea-646f-422e-bc21-39189bddba17-host\") pod \"node-ca-8lh4c\" (UID: \"0724e8ea-646f-422e-bc21-39189bddba17\") " pod="openshift-image-registry/node-ca-8lh4c" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.812496 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0724e8ea-646f-422e-bc21-39189bddba17-host\") pod \"node-ca-8lh4c\" (UID: \"0724e8ea-646f-422e-bc21-39189bddba17\") " pod="openshift-image-registry/node-ca-8lh4c" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.812585 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s4cn4\" (UniqueName: \"kubernetes.io/projected/0724e8ea-646f-422e-bc21-39189bddba17-kube-api-access-s4cn4\") pod \"node-ca-8lh4c\" (UID: \"0724e8ea-646f-422e-bc21-39189bddba17\") " pod="openshift-image-registry/node-ca-8lh4c" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.814301 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/0724e8ea-646f-422e-bc21-39189bddba17-serviceca\") pod \"node-ca-8lh4c\" (UID: \"0724e8ea-646f-422e-bc21-39189bddba17\") " pod="openshift-image-registry/node-ca-8lh4c" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.830425 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s4cn4\" (UniqueName: 
\"kubernetes.io/projected/0724e8ea-646f-422e-bc21-39189bddba17-kube-api-access-s4cn4\") pod \"node-ca-8lh4c\" (UID: \"0724e8ea-646f-422e-bc21-39189bddba17\") " pod="openshift-image-registry/node-ca-8lh4c" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.906442 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.906488 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.906497 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.906530 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.906540 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:09Z","lastTransitionTime":"2025-12-11T10:18:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:09 crc kubenswrapper[4663]: I1211 10:18:09.941036 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-8lh4c" Dec 11 10:18:09 crc kubenswrapper[4663]: W1211 10:18:09.954671 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0724e8ea_646f_422e_bc21_39189bddba17.slice/crio-89b56283d6f045716a6054940cd5ad3fba9cdb0f949c4128bed2f241fa783a27 WatchSource:0}: Error finding container 89b56283d6f045716a6054940cd5ad3fba9cdb0f949c4128bed2f241fa783a27: Status 404 returned error can't find the container with id 89b56283d6f045716a6054940cd5ad3fba9cdb0f949c4128bed2f241fa783a27 Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.009169 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.009582 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.009594 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.009613 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.009627 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:10Z","lastTransitionTime":"2025-12-11T10:18:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.112617 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.112680 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.112691 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.112727 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.112739 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:10Z","lastTransitionTime":"2025-12-11T10:18:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.215616 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.215665 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.215679 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.215699 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.215709 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:10Z","lastTransitionTime":"2025-12-11T10:18:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.215981 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.216139 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.216172 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.216235 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:18:10 crc kubenswrapper[4663]: E1211 10:18:10.216289 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 10:18:14.216243073 +0000 UTC m=+25.034172857 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 10:18:10 crc kubenswrapper[4663]: E1211 10:18:10.216323 4663 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Dec 11 10:18:10 crc kubenswrapper[4663]: E1211 10:18:10.216357 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 11 10:18:10 crc kubenswrapper[4663]: E1211 10:18:10.216383 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 11 10:18:10 crc kubenswrapper[4663]: E1211 10:18:10.216395 4663 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 11 10:18:10 crc kubenswrapper[4663]: E1211 10:18:10.216331 4663 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 11 10:18:10 crc kubenswrapper[4663]: E1211 10:18:10.216435 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 10:18:14.216410941 +0000 UTC m=+25.034340725 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Dec 11 10:18:10 crc kubenswrapper[4663]: E1211 10:18:10.216465 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-11 10:18:14.216454572 +0000 UTC m=+25.034384355 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 11 10:18:10 crc kubenswrapper[4663]: E1211 10:18:10.216493 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 10:18:14.216482111 +0000 UTC m=+25.034411896 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.317501 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 11 10:18:10 crc kubenswrapper[4663]: E1211 10:18:10.317878 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 11 10:18:10 crc kubenswrapper[4663]: E1211 10:18:10.317924 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 11 10:18:10 crc kubenswrapper[4663]: E1211 10:18:10.317941 4663 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 11 10:18:10 crc kubenswrapper[4663]: E1211 10:18:10.318032 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-11 10:18:14.318007233 +0000 UTC m=+25.135937008 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.319309 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.319381 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.319392 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.319419 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.319431 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:10Z","lastTransitionTime":"2025-12-11T10:18:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.422385 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.422421 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.422432 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.422446 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.422457 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:10Z","lastTransitionTime":"2025-12-11T10:18:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.503993 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.504024 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 11 10:18:10 crc kubenswrapper[4663]: E1211 10:18:10.504433 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 11 10:18:10 crc kubenswrapper[4663]: E1211 10:18:10.504527 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.525107 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.525237 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.525300 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.525421 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.525487 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:10Z","lastTransitionTime":"2025-12-11T10:18:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.617930 4663 generic.go:334] "Generic (PLEG): container finished" podID="27da1f1a-7a92-4c81-9889-21b44ea62db0" containerID="5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834" exitCode=0
Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.618036 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-4g9zj" event={"ID":"27da1f1a-7a92-4c81-9889-21b44ea62db0","Type":"ContainerDied","Data":"5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834"}
Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.620957 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-8lh4c" event={"ID":"0724e8ea-646f-422e-bc21-39189bddba17","Type":"ContainerStarted","Data":"14a19c708bdceab221701a9e228b35336aa133a021ee7a088800e9bec694e175"}
Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.620995 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-8lh4c" event={"ID":"0724e8ea-646f-422e-bc21-39189bddba17","Type":"ContainerStarted","Data":"89b56283d6f045716a6054940cd5ad3fba9cdb0f949c4128bed2f241fa783a27"}
Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.627817 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.627965 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.628028 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:10 crc kubenswrapper[4663]: I1211
10:18:10.628097 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.628172 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:10Z","lastTransitionTime":"2025-12-11T10:18:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.633831 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"82107b1a-b827-4c4a-b133-ce7e8ed73963\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf6437a10b018ecd7f7c1a23eb664377818d86ae958710d078a245878cd7d5a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52c5a9fef9792d964d1b4505a0e5857652454e088fbc8293524f644afd044507\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed406469f2fa49f8100c4c2c4adf80a173b1e5c8ab8581a4dc8c4d93fd557c7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a5
78bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b28b0a79fd6ef3045b5417a53b9e665faea306044a08b64362f5f573221dea8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:10Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.646367 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8fc3491f01b999c50e166f30aef0a1c9d95bc6935a4b68cdac9bfbb70bce980\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:10Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.658339 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:10Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.672833 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:10Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.685618 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4g9zj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27da1f1a-7a92-4c81-9889-21b44ea62db0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4g9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:10Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.694831 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8lh4c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0724e8ea-646f-422e-bc21-39189bddba17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4cn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8lh4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:10Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:10 
crc kubenswrapper[4663]: I1211 10:18:10.706468 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b934597e-ac43-4d21-82b1-c8d4260800e6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba7f98a60c12531c6766887ffac909316494552cc1acb036c5609e6400ac5b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d9bf7e6f64790700d3765c1935fa7eb13fa839a96617117bb226a8b86090fe9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83cef2539e49e0251b7cdab06b7a9002344f7ee41150f84b3f648372b4d50b55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"ru
nning\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a89c5b395c04b94c4476db7bf1edcfe175fb552dbbf4145a7031c0f86e139e9f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0068e2981a8c781e9563c1714bf0be7f35b5510f1fd7e52ab4cc83a0c0ec0c0e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"le observer\\\\nW1211 10:18:06.233120 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1211 10:18:06.233258 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 10:18:06.234798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2924789290/tls.crt::/tmp/serving-cert-2924789290/tls.key\\\\\\\"\\\\nI1211 10:18:06.570830 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 10:18:06.573023 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 10:18:06.573049 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 10:18:06.573076 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 10:18:06.573081 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 10:18:06.580393 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1211 10:18:06.580415 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580420 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580424 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 10:18:06.580426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 10:18:06.580429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 10:18:06.580432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1211 10:18:06.580522 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1211 10:18:06.581348 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1a0dcfabc032353060f8cb94fc8c601da6b20a3e2c35b84229bf66c9256537\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:10Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.716836 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9e980e59ce0e172d37122fdae971fb0123940795827553f646ae1a6283f523\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:10Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.730573 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.730614 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.730627 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.730641 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.730652 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:10Z","lastTransitionTime":"2025-12-11T10:18:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.730518 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f739f0f-4546-4db5-809d-435823288bd8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530
c96cde057e4022\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-z7882\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:10Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.742125 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dc8119f8882cfe2688a3b38b4746f6f9f75764080053fae1f19edd7880464e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de08980167134ace33e7d6a626c4ee66e733e247e6e8d0f7e969105c12f41580\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:10Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.751267 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:10Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.759919 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8f5qw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7cf8637-c767-47eb-8d38-578340067566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3440496709e41014f785c5aadeba1bae72c4b733779039f7508fa2ca78144537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-646rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8f5qw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:10Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.769024 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98941020-2764-490d-81db-9decd21aaa13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a72fbeafe047b24c15eb96a00eacf7ff119486159cd9cf32f63af409aecc529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed2238a7f3178a2c325e0f77a94934af6c907c13d0340e7f4c67b744669b56b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mq524\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:10Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.778104 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-qv7jl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb85226c32cc03f4667a27f4596ce501400b323c88b347baab7574fe8f9e056c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/ser
viceaccount\\\",\\\"name\\\":\\\"kube-api-access-4c2nm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-qv7jl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:10Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.787354 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9e980e59ce0e172d37122fdae971fb0123940795827553f646ae1a6283f523\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:10Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.801336 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f739f0f-4546-4db5-809d-435823288bd8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-z7882\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:10Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.810458 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b934597e-ac43-4d21-82b1-c8d4260800e6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba7f98a60c12531c6766887ffac909316494552cc1acb036c5609e6400ac5b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d9bf7e6f64790700d3765c1935fa7eb13fa839a96617117bb226a8b86090fe9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83cef2539e49e0251b7cdab06b7a9002344f7ee41150f84b3f648372b4d50b55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a89c5b395c04b94c4476db7bf1edcfe175fb552dbbf4145a7031c0f86e139e9f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0068e2981a8c781e9563c1714bf0be7f35b5510f1fd7e52ab4cc83a0c0ec0c0e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"le observer\\\\nW1211 10:18:06.233120 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1211 10:18:06.233258 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 10:18:06.234798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2924789290/tls.crt::/tmp/serving-cert-2924789290/tls.key\\\\\\\"\\\\nI1211 10:18:06.570830 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 10:18:06.573023 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 10:18:06.573049 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 10:18:06.573076 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 10:18:06.573081 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 10:18:06.580393 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1211 10:18:06.580415 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580420 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580424 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 10:18:06.580426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 10:18:06.580429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 10:18:06.580432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1211 10:18:06.580522 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1211 10:18:06.581348 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1a0dcfabc032353060f8cb94fc8c601da6b20a3e2c35b84229bf66c9256537\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:10Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.817944 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8f5qw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7cf8637-c767-47eb-8d38-578340067566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3440496709e41014f785c5aadeba1bae72c4b733779039f7508fa2ca78144537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-646rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8f5qw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:10Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.826513 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"98941020-2764-490d-81db-9decd21aaa13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a72fbeafe047b24c15eb96a00eacf7ff119486159cd9cf32f63af409aecc529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed2238a7f3178a2c325e0f77a94934af6c907c13d0340e7f4c67b744669b56b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mq524\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:10Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.833327 4663 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.833372 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.833382 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.833398 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.833412 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:10Z","lastTransitionTime":"2025-12-11T10:18:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.838521 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-qv7jl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb85226c32cc03f4667a27f4596ce501400b323c88b347baab7574fe8f9e056c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin
\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4c2nm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-qv7jl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:10Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.848230 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dc8119f8882cfe2688a3b38b4746f6f9f75764080053fae1f19edd7880464e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de08980167134ace33e7d6a626c4ee66e733e247e6e8d0f7e969105c12f41580\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha2
56:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:10Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.856682 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:10Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.865009 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:10Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.873667 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:10Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.883162 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4g9zj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27da1f1a-7a92-4c81-9889-21b44ea62db0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4g9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:10Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.891362 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8lh4c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0724e8ea-646f-422e-bc21-39189bddba17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14a19c708bdceab221701a9e228b35336aa133a021ee7a088800e9bec694e175\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4cn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8lh4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:10Z is after 
2025-08-24T17:21:41Z" Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.901428 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"82107b1a-b827-4c4a-b133-ce7e8ed73963\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf6437a10b018ecd7f7c1a23eb664377818d86ae958710d078a245878cd7d5a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52c5a9fef9792d964d1b4505a0e5857652454e088fbc8293524f644afd044507\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed406469f2fa49f8100c4c2c4adf80a173b1e5c8ab8581a4dc8c4d93fd557c7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/k
ubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b28b0a79fd6ef3045b5417a53b9e665faea306044a08b64362f5f573221dea8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:10Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.931694 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8fc3491f01b999c50e166f30aef0a1c9d95bc6935a4b68cdac9bfbb70bce980\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:10Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.935953 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.935995 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.936008 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.936025 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:10 crc kubenswrapper[4663]: I1211 10:18:10.936035 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:10Z","lastTransitionTime":"2025-12-11T10:18:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.038171 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.038222 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.038230 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.038244 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.038253 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:11Z","lastTransitionTime":"2025-12-11T10:18:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.141170 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.141246 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.141255 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.141273 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.141287 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:11Z","lastTransitionTime":"2025-12-11T10:18:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.243841 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.243882 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.243894 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.243913 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.243924 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:11Z","lastTransitionTime":"2025-12-11T10:18:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.346720 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.346776 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.346785 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.346801 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.346812 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:11Z","lastTransitionTime":"2025-12-11T10:18:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.450450 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.450499 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.450511 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.450525 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.450537 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:11Z","lastTransitionTime":"2025-12-11T10:18:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.509029 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:18:11 crc kubenswrapper[4663]: E1211 10:18:11.509196 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.552695 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.552737 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.552749 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.552766 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.552778 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:11Z","lastTransitionTime":"2025-12-11T10:18:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.628147 4663 generic.go:334] "Generic (PLEG): container finished" podID="27da1f1a-7a92-4c81-9889-21b44ea62db0" containerID="7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604" exitCode=0 Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.628238 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-4g9zj" event={"ID":"27da1f1a-7a92-4c81-9889-21b44ea62db0","Type":"ContainerDied","Data":"7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604"} Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.634047 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" event={"ID":"4f739f0f-4546-4db5-809d-435823288bd8","Type":"ContainerStarted","Data":"352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e"} Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.650686 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8f5qw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7cf8637-c767-47eb-8d38-578340067566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3440496709e41014f785c5aadeba1bae72c4b733779039f7508fa2ca78144537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-646rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8f5qw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-11T10:18:11Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.654982 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.655005 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.655016 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.655031 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.655041 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:11Z","lastTransitionTime":"2025-12-11T10:18:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.664115 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98941020-2764-490d-81db-9decd21aaa13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a72fbeafe047b24c15eb96a00eacf7ff119486159cd9cf32f63af409aecc529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed2238a7f3178a2c325e0f77a94934af6c907c13d0340e7f4c67b744669b56b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha2
56:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mq524\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:11Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.673493 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-qv7jl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb85226c32cc03f4667a27f4596ce501400b323c88b347baab7574fe8f9e056c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/mul
tus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4c2nm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-qv7jl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:11Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.683642 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dc8119f8882cfe2688a3b38b4746f6f9f75764080053fae1f19edd7880464e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de08980167134ace33e7d6a626c4ee66e733e247e6e8d0f7e969105c12f41580\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:11Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.693063 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:11Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.701602 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:11Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.711548 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:11Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.721550 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4g9zj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27da1f1a-7a92-4c81-9889-21b44ea62db0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4g9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:11Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.728644 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8lh4c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0724e8ea-646f-422e-bc21-39189bddba17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14a19c708bdceab221701a9e228b35336aa133a021ee7a088800e9bec694e175\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4cn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\
"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8lh4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:11Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.737906 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"82107b1a-b827-4c4a-b133-ce7e8ed73963\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf6437a10b018ecd7f7c1a23eb664377818d86ae958710d078a245878cd7d5a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52c5a9fef9792d964d1b4505a0e5857652454e088fbc8293524f644afd044507\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed406469f2fa49f8100c4c2c4adf80a173b1e5c8ab8581a4dc8c4d93fd557c7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac
117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b28b0a79fd6ef3045b5417a53b9e665faea306044a08b64362f5f573221dea8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:11Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.748824 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8fc3491f01b999c50e166f30aef0a1c9d95bc6935a4b68cdac9bfbb70bce980\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:11Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.758324 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9e980e59ce0e172d37122fdae971fb0123940795827553f646ae1a6283f523\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:11Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.758761 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.758821 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.758853 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.758879 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.758891 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:11Z","lastTransitionTime":"2025-12-11T10:18:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.773782 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f739f0f-4546-4db5-809d-435823288bd8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530
c96cde057e4022\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-z7882\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:11Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.789571 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b934597e-ac43-4d21-82b1-c8d4260800e6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba7f98a60c12531c6766887ffac909316494552cc1acb036c5609e6400ac5b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d9bf7e6f64790700d3765c1935fa7eb13fa839a96617117bb226a8b86090fe9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83cef2539e49e0251b7cdab06b7a9002344f7ee41150f84b3f648372b4d50b55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a89c5b395c04b94c4476db7bf1edcfe175fb552dbbf4145a7031c0f86e139e9f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0068e2981a8c781e9563c1714bf0be7f35b5510f1fd7e52ab4cc83a0c0ec0c0e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"le observer\\\\nW1211 10:18:06.233120 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1211 10:18:06.233258 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 10:18:06.234798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2924789290/tls.crt::/tmp/serving-cert-2924789290/tls.key\\\\\\\"\\\\nI1211 10:18:06.570830 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 10:18:06.573023 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 10:18:06.573049 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 10:18:06.573076 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 10:18:06.573081 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 10:18:06.580393 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1211 10:18:06.580415 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580420 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580424 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 10:18:06.580426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 10:18:06.580429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 10:18:06.580432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1211 10:18:06.580522 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1211 10:18:06.581348 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1a0dcfabc032353060f8cb94fc8c601da6b20a3e2c35b84229bf66c9256537\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:11Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.861784 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.861813 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.861822 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.861835 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.861845 4663 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:11Z","lastTransitionTime":"2025-12-11T10:18:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.964566 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.964962 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.964974 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.964997 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:11 crc kubenswrapper[4663]: I1211 10:18:11.965009 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:11Z","lastTransitionTime":"2025-12-11T10:18:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.068309 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.068358 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.068373 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.068393 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.068404 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:12Z","lastTransitionTime":"2025-12-11T10:18:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.170472 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.170515 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.170526 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.170541 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.170552 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:12Z","lastTransitionTime":"2025-12-11T10:18:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.272783 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.272815 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.272824 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.272842 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.272854 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:12Z","lastTransitionTime":"2025-12-11T10:18:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.374722 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.374755 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.374763 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.374780 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.374791 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:12Z","lastTransitionTime":"2025-12-11T10:18:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.477013 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.477074 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.477084 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.477106 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.477118 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:12Z","lastTransitionTime":"2025-12-11T10:18:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.504146 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.504278 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:18:12 crc kubenswrapper[4663]: E1211 10:18:12.504371 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 10:18:12 crc kubenswrapper[4663]: E1211 10:18:12.504430 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.579910 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.579952 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.579964 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.579987 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.579996 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:12Z","lastTransitionTime":"2025-12-11T10:18:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.640271 4663 generic.go:334] "Generic (PLEG): container finished" podID="27da1f1a-7a92-4c81-9889-21b44ea62db0" containerID="ef7bb22e51b47e9e49687153801d9a61be88291a5c4ae94f0d6c779c1eb779c5" exitCode=0 Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.640335 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-4g9zj" event={"ID":"27da1f1a-7a92-4c81-9889-21b44ea62db0","Type":"ContainerDied","Data":"ef7bb22e51b47e9e49687153801d9a61be88291a5c4ae94f0d6c779c1eb779c5"} Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.653798 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8fc3491f01b999c50e166f30aef0a1c9d95bc6935a4b68cdac9bfbb70bce980\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:12Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.666236 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:12Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.677162 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:12Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.681369 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.681413 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.681423 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.681437 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.681447 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:12Z","lastTransitionTime":"2025-12-11T10:18:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.689520 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4g9zj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27da1f1a-7a92-4c81-9889-21b44ea62db0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dll
pp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"reason\\\
":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef7bb22e51b47e9e49687153801d9a61be88291a5c4ae94f0d6c779c1eb779c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef7bb22e51b47e9e49687153801d9a61be88291a5c4ae94f0d6c779c1eb779c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4g9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:12Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.697309 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8lh4c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0724e8ea-646f-422e-bc21-39189bddba17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14a19c708bdceab221701a9e228b35336aa133a021ee7a088800e9bec694e175\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4cn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8lh4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:12Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.706910 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"82107b1a-b827-4c4a-b133-ce7e8ed73963\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf6437a10b018ecd7f7c1a23eb664377818d86ae958710d078a245878cd7d5a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52c5a9fef9792d964d1b4505a0e5857652454e088fbc8293524f644afd044507\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed406469f2fa49f8100c4c2c4adf80a173b1e5c8ab8581a4dc8c4d93fd557c7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b28b0a79fd6ef3045b5417a53b9e665faea306044a08b64362f5f573221dea8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:12Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.716529 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b934597e-ac43-4d21-82b1-c8d4260800e6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba7f98a60c12531c6766887ffac909316494552cc1acb036c5609e6400ac5b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d9bf7e6f64790700d3765c1935fa7eb13fa839a96617117bb226a8b86090fe9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83cef2539e49e0251b7cdab06b7a9002344f7ee41150f84b3f648372b4d50b55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a89c5b395c04b94c4476db7bf1edcfe175fb552dbbf4145a7031c0f86e139e9f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0068e2981a8c781e9563c1714bf0be7f35b5510f1fd7e52ab4cc83a0c0ec0c0e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"le observer\\\\nW1211 10:18:06.233120 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1211 10:18:06.233258 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 10:18:06.234798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2924789290/tls.crt::/tmp/serving-cert-2924789290/tls.key\\\\\\\"\\\\nI1211 10:18:06.570830 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 10:18:06.573023 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 10:18:06.573049 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 10:18:06.573076 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 10:18:06.573081 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 10:18:06.580393 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1211 10:18:06.580415 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580420 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580424 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 10:18:06.580426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 10:18:06.580429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 10:18:06.580432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1211 10:18:06.580522 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1211 10:18:06.581348 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1a0dcfabc032353060f8cb94fc8c601da6b20a3e2c35b84229bf66c9256537\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:12Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.726217 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9e980e59ce0e172d37122fdae971fb0123940795827553f646ae1a6283f523\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:12Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.740346 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f739f0f-4546-4db5-809d-435823288bd8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-z7882\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:12Z 
is after 2025-08-24T17:21:41Z" Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.754565 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:12Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.764681 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8f5qw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7cf8637-c767-47eb-8d38-578340067566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3440496709e41014f785c5aadeba1bae72c4b733779039f7508fa2ca78144537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-646rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8f5qw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:12Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.775667 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"98941020-2764-490d-81db-9decd21aaa13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a72fbeafe047b24c15eb96a00eacf7ff119486159cd9cf32f63af409aecc529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed2238a7f3178a2c325e0f77a94934af6c907c13d0340e7f4c67b744669b56b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mq524\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:12Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.783817 4663 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.783847 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.783857 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.783875 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.783888 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:12Z","lastTransitionTime":"2025-12-11T10:18:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.787451 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-qv7jl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb85226c32cc03f4667a27f4596ce501400b323c88b347baab7574fe8f9e056c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin
\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4c2nm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-qv7jl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:12Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.797855 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dc8119f8882cfe2688a3b38b4746f6f9f75764080053fae1f19edd7880464e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de08980167134ace33e7d6a626c4ee66e733e247e6e8d0f7e969105c12f41580\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha2
56:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:12Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.885824 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.885863 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.885873 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.885886 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.885894 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:12Z","lastTransitionTime":"2025-12-11T10:18:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.989782 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.989818 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.989826 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.989840 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:12 crc kubenswrapper[4663]: I1211 10:18:12.989849 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:12Z","lastTransitionTime":"2025-12-11T10:18:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.093037 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.093106 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.093118 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.093141 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.093155 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:13Z","lastTransitionTime":"2025-12-11T10:18:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.195961 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.195999 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.196009 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.196024 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.196034 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:13Z","lastTransitionTime":"2025-12-11T10:18:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.297991 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.298176 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.298213 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.298227 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.298236 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:13Z","lastTransitionTime":"2025-12-11T10:18:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.399758 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.399793 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.399804 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.399820 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.399831 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:13Z","lastTransitionTime":"2025-12-11T10:18:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.503398 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.503435 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.503444 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.503459 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.503470 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:13Z","lastTransitionTime":"2025-12-11T10:18:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.503761 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:18:13 crc kubenswrapper[4663]: E1211 10:18:13.503841 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.606092 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.606133 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.606145 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.606161 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.606171 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:13Z","lastTransitionTime":"2025-12-11T10:18:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.647173 4663 generic.go:334] "Generic (PLEG): container finished" podID="27da1f1a-7a92-4c81-9889-21b44ea62db0" containerID="28edb9dba08d7e8cccab85d649f75ce2c946ff8be7bb2de39e02432156e2ccf8" exitCode=0 Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.647262 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-4g9zj" event={"ID":"27da1f1a-7a92-4c81-9889-21b44ea62db0","Type":"ContainerDied","Data":"28edb9dba08d7e8cccab85d649f75ce2c946ff8be7bb2de39e02432156e2ccf8"} Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.650465 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" event={"ID":"4f739f0f-4546-4db5-809d-435823288bd8","Type":"ContainerStarted","Data":"3becd628b56db394b1c90e56b11dd31accc93eb44c3d73b230c24f12c7a1cd02"} Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.650669 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.650701 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.664486 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b934597e-ac43-4d21-82b1-c8d4260800e6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba7f98a60c12531c6766887ffac909316494552cc1acb036c5609e6400ac5b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d9bf7e6f64790700d3765c1935fa7eb13fa839a96617117bb226a8b86090fe9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83cef2539e49e0251b7cdab06b7a9002344f7ee41150f84b3f648372b4d50b55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a89c5b395c04b94c4476db7bf1edcfe175fb552dbbf4145a7031c0f86e139e9f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0068e2981a8c781e9563c1714bf0be7f35b5510f1fd7e52ab4cc83a0c0ec0c0e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"le observer\\\\nW1211 10:18:06.233120 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1211 10:18:06.233258 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 10:18:06.234798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2924789290/tls.crt::/tmp/serving-cert-2924789290/tls.key\\\\\\\"\\\\nI1211 10:18:06.570830 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 10:18:06.573023 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 10:18:06.573049 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 10:18:06.573076 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 10:18:06.573081 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 10:18:06.580393 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1211 10:18:06.580415 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580420 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580424 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 10:18:06.580426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 10:18:06.580429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 10:18:06.580432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1211 10:18:06.580522 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1211 10:18:06.581348 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1a0dcfabc032353060f8cb94fc8c601da6b20a3e2c35b84229bf66c9256537\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:13Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.669884 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.669990 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.673329 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9e980e59ce0e172d37122fdae971fb0123940795827553f646ae1a6283f523\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:13Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.688279 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f739f0f-4546-4db5-809d-435823288bd8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-z7882\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:13Z 
is after 2025-08-24T17:21:41Z" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.698272 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dc8119f8882cfe2688a3b38b4746f6f9f75764080053fae1f19edd7880464e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de08980167134ace33e7d6a626c4ee66e733e247e6e8d0f7e969105c12f41580\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:13Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.708062 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 
10:18:13.708105 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.708114 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.708129 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.708138 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:13Z","lastTransitionTime":"2025-12-11T10:18:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.709776 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:13Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.719723 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8f5qw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7cf8637-c767-47eb-8d38-578340067566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3440496709e41014f785c5aadeba1bae72c4b733779039f7508fa2ca78144537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-646rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8f5qw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:13Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.728686 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98941020-2764-490d-81db-9decd21aaa13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a72fbeafe047b24c15eb96a00eacf7ff119486159cd9cf32f63af409aecc529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed2238a7f3178a2c325e0f77a94934af6c907c13d0340e7f4c67b744669b56b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mq524\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:13Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.740565 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-qv7jl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb85226c32cc03f4667a27f4596ce501400b323c88b347baab7574fe8f9e056c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/ser
viceaccount\\\",\\\"name\\\":\\\"kube-api-access-4c2nm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-qv7jl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:13Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.767787 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"82107b1a-b827-4c4a-b133-ce7e8ed73963\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf6437a10b018ecd7f7c1a23eb664377818d86ae958710d078a245878cd7d5a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52c5a9fef9792d964d1b4505a0e5857652454e088fbc8293524f644afd044507\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed406469f2fa49f8100c4c2c4adf80a173b1e5c
8ab8581a4dc8c4d93fd557c7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b28b0a79fd6ef3045b5417a53b9e665faea306044a08b64362f5f573221dea8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:13Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.789756 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8fc3491f01b999c50e166f30aef0a1c9d95bc6935a4b68cdac9bfbb70bce980\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:13Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.802504 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:13Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.810340 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.810378 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.810388 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.810405 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.810415 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:13Z","lastTransitionTime":"2025-12-11T10:18:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.812826 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:13Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.825035 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4g9zj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27da1f1a-7a92-4c81-9889-21b44ea62db0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef7bb22e51b47e9e49687153801d9a61be88291a5c4ae94f0d6c779c1eb779c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef7
bb22e51b47e9e49687153801d9a61be88291a5c4ae94f0d6c779c1eb779c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28edb9dba08d7e8cccab85d649f75ce2c946ff8be7bb2de39e02432156e2ccf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28edb9dba08d7e8cccab85d649f75ce2c946ff8be7bb2de39e02432156e2ccf8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4g9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:13Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.832741 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8lh4c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0724e8ea-646f-422e-bc21-39189bddba17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14a19c708bdceab221701a9e228b35336aa133a021ee7a088800e9bec694e175\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4cn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8lh4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:13Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.842997 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b934597e-ac43-4d21-82b1-c8d4260800e6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"message\\\":\\\"containers with 
unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba7f98a60c12531c6766887ffac909316494552cc1acb036c5609e6400ac5b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d9bf7e6f64790700d3765c1935fa7eb13fa839a96617117bb226a8b86090fe9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83cef2539e49e0251b7cdab06b7a9002344f7ee41150f84b3f648372b4d50b55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a89c5b395c04b94c4476db7bf1edcfe175fb552dbbf4145a7031c0f86e139e9f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0068e2981a8c781e9563c1714bf0be7f35b5510f1fd7e52ab4cc83a0c0ec0c0e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"le observer\\\\nW1211 10:18:06.233120 1 builder.go:272] unable to get owner 
reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1211 10:18:06.233258 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 10:18:06.234798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2924789290/tls.crt::/tmp/serving-cert-2924789290/tls.key\\\\\\\"\\\\nI1211 10:18:06.570830 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 10:18:06.573023 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 10:18:06.573049 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 10:18:06.573076 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 10:18:06.573081 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 10:18:06.580393 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1211 10:18:06.580415 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580420 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580424 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 10:18:06.580426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 10:18:06.580429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 10:18:06.580432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1211 10:18:06.580522 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1211 10:18:06.581348 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1a0dcfabc032353060f8cb94fc8c601da6b20a3e2c35b84229bf66c9256537\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:13Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.851622 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9e980e59ce0e172d37122fdae971fb0123940795827553f646ae1a6283f523\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:13Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.865724 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f739f0f-4546-4db5-809d-435823288bd8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84fd59d33b64d1211bfe2754377534ed00ca078356eeb674fcd5d2d25761500b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3becd628b56db394b1c90e56b11dd31accc93eb4
4c3d73b230c24f12c7a1cd02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-z7882\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:13Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.875051 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dc8119f8882cfe2688a3b38b4746f6f9f75764080053fae1f19edd7880464e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de08980167134ace33e7d6a626c4ee66e733e247e6e8d0f7e969105c12f41580\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:13Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.884219 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:13Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.891574 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8f5qw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7cf8637-c767-47eb-8d38-578340067566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3440496709e41014f785c5aadeba1bae72c4b733779039f7508fa2ca78144537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-646rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8f5qw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:13Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.899826 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"98941020-2764-490d-81db-9decd21aaa13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a72fbeafe047b24c15eb96a00eacf7ff119486159cd9cf32f63af409aecc529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed2238a7f3178a2c325e0f77a94934af6c907c13d0340e7f4c67b744669b56b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mq524\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:13Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.909127 4663 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-qv7jl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb85226c32cc03f4667a27f4596ce501400b323c88b347baab7574fe8f9e056c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4c2nm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-qv7jl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:13Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.912093 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.912123 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.912132 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.912147 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.912156 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:13Z","lastTransitionTime":"2025-12-11T10:18:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.918123 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"82107b1a-b827-4c4a-b133-ce7e8ed73963\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf6437a10b018ecd7f7c1a23eb664377818d86ae958710d078a245878cd7d5a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52c5a9fef9792d964d1b4505a0e5857652454e088fbc8293524f644afd044507
\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed406469f2fa49f8100c4c2c4adf80a173b1e5c8ab8581a4dc8c4d93fd557c7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b28b0a79fd6ef3045b5417a53b9e665faea306044a08b64362f5f573221dea8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:13Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.928126 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8fc3491f01b999c50e166f30aef0a1c9d95bc6935a4b68cdac9bfbb70bce980\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:13Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.937425 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:13Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.945834 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:13Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.955710 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4g9zj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27da1f1a-7a92-4c81-9889-21b44ea62db0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef7bb22e51b47e9e49687153801d9a61be88291a5c4ae94f0d6c779c1eb779c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef7bb22e51b47e9e49687153801d9a61be88291a5c4ae94f0d6c779c1eb779c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28edb9dba08d7e8cccab85d649f75ce2c946ff8be7bb2de39e02432156e2ccf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28edb9dba08d7e8cccab85d649f75ce2c946ff8be7bb2de39e02432156e2ccf8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4g9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:13Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:13 crc kubenswrapper[4663]: I1211 10:18:13.962863 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8lh4c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0724e8ea-646f-422e-bc21-39189bddba17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14a19c708bdceab221701a9e228b35336aa133a021ee7a088800e9bec694e175\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4cn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8lh4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:13Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.014948 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.014993 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.015027 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.015057 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.015069 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:14Z","lastTransitionTime":"2025-12-11T10:18:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.117117 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.117216 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.117227 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.117244 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.117258 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:14Z","lastTransitionTime":"2025-12-11T10:18:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.220240 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.220283 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.220293 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.220308 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.220317 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:14Z","lastTransitionTime":"2025-12-11T10:18:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.253090 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.253240 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.253265 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:18:14 crc kubenswrapper[4663]: E1211 10:18:14.253299 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 10:18:22.253278977 +0000 UTC m=+33.071208771 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 10:18:14 crc kubenswrapper[4663]: E1211 10:18:14.253337 4663 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 10:18:14 crc kubenswrapper[4663]: E1211 10:18:14.253379 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 10:18:14 crc kubenswrapper[4663]: E1211 10:18:14.253397 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 10:18:14 crc kubenswrapper[4663]: E1211 10:18:14.253402 4663 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 10:18:14 crc kubenswrapper[4663]: E1211 10:18:14.253411 4663 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.253336 4663 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:18:14 crc kubenswrapper[4663]: E1211 10:18:14.253399 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 10:18:22.253387466 +0000 UTC m=+33.071317260 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 10:18:14 crc kubenswrapper[4663]: E1211 10:18:14.253499 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 10:18:22.253473905 +0000 UTC m=+33.071403688 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 10:18:14 crc kubenswrapper[4663]: E1211 10:18:14.253522 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-11 10:18:22.253514759 +0000 UTC m=+33.071444543 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.322428 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.322460 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.322470 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.322483 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.322493 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:14Z","lastTransitionTime":"2025-12-11T10:18:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.354200 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:18:14 crc kubenswrapper[4663]: E1211 10:18:14.354358 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 10:18:14 crc kubenswrapper[4663]: E1211 10:18:14.354383 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 10:18:14 crc kubenswrapper[4663]: E1211 10:18:14.354394 4663 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 10:18:14 crc kubenswrapper[4663]: E1211 10:18:14.354444 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-11 10:18:22.354430803 +0000 UTC m=+33.172360587 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.425327 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.425388 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.425401 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.425423 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.425434 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:14Z","lastTransitionTime":"2025-12-11T10:18:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.504453 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.504487 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:18:14 crc kubenswrapper[4663]: E1211 10:18:14.504573 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 10:18:14 crc kubenswrapper[4663]: E1211 10:18:14.504673 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.527944 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.527990 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.528000 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.528022 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.528034 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:14Z","lastTransitionTime":"2025-12-11T10:18:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.630796 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.630833 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.630844 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.630861 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.630873 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:14Z","lastTransitionTime":"2025-12-11T10:18:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.658305 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-4g9zj" event={"ID":"27da1f1a-7a92-4c81-9889-21b44ea62db0","Type":"ContainerStarted","Data":"dcf1553f9e95b87867fdf90f869863968036e47ec536a526cae63020cc9119fb"} Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.658390 4663 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.675875 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b934597e-ac43-4d21-82b1-c8d4260800e6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba7f98a60c12531c6766887ffac909316494552cc1acb036c5609e6400ac5b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d9bf7e6f64790700d3765c1935fa7eb13fa839a96617117bb226a8b86090fe9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83cef2539e
49e0251b7cdab06b7a9002344f7ee41150f84b3f648372b4d50b55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a89c5b395c04b94c4476db7bf1edcfe175fb552dbbf4145a7031c0f86e139e9f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0068e2981a8c781e9563c1714bf0be7f35b5510f1fd7e52ab4cc83a0c0ec0c0e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"le observer\\\\nW1211 10:18:06.233120 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1211 10:18:06.233258 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 10:18:06.234798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2924789290/tls.crt::/tmp/serving-cert-2924789290/tls.key\\\\\\\"\\\\nI1211 10:18:06.570830 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 10:18:06.573023 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 10:18:06.573049 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 10:18:06.573076 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 10:18:06.573081 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 10:18:06.580393 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1211 10:18:06.580415 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580420 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580424 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 10:18:06.580426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 10:18:06.580429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 10:18:06.580432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1211 10:18:06.580522 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1211 10:18:06.581348 1 cmd.go:182] pods 
\\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1a0dcfabc032353060f8cb94fc8c601da6b20a3e2c35b84229bf66c9256537\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:14Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.686955 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9e980e59ce0e172d37122fdae971fb0123940795827553f646ae1a6283f523\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:14Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.702556 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f739f0f-4546-4db5-809d-435823288bd8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84fd59d33b64d1211bfe2754377534ed00ca078356eeb674fcd5d2d25761500b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3becd628b56db394b1c90e56b11dd31accc93eb4
4c3d73b230c24f12c7a1cd02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-z7882\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:14Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.712675 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dc8119f8882cfe2688a3b38b4746f6f9f75764080053fae1f19edd7880464e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de08980167134ace33e7d6a626c4ee66e733e247e6e8d0f7e969105c12f41580\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:14Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.722421 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:14Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.730605 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8f5qw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7cf8637-c767-47eb-8d38-578340067566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3440496709e41014f785c5aadeba1bae72c4b733779039f7508fa2ca78144537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-646rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8f5qw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:14Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.733625 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.733670 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.733680 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.733704 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.733717 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:14Z","lastTransitionTime":"2025-12-11T10:18:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.744849 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98941020-2764-490d-81db-9decd21aaa13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a72fbeafe047b24c15eb96a00eacf7ff119486159cd9cf32f63af409aecc529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed2238a7f3178a2c325e0f77a94934af6c907c13d0340e7f4c67b744669b56b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mq524\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:14Z is after 2025-08-24T17:21:41Z"
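Each of these status_manager.go:875 failures is an aborted strategic-merge patch: the $setElementOrder/conditions key pins the server-side ordering of the conditions list while only the changed entries are sent, merged by their "type" key. A short Go sketch that assembles a patch of the same shape; the pod UID and condition values are copied from the log above purely for illustration.

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// Shape of the patch the status manager tried to send, reduced to a
	// single changed condition; this is an illustration, not kubelet code.
	patch := map[string]any{
		"metadata": map[string]any{"uid": "43cbb02f-aa8f-41aa-bd0a-8042aca3002c"},
		"status": map[string]any{
			// $setElementOrder pins the final ordering of the merged list;
			// each element names a condition by its merge key, "type".
			"$setElementOrder/conditions": []map[string]string{
				{"type": "PodReadyToStartContainers"}, {"type": "Initialized"},
				{"type": "Ready"}, {"type": "ContainersReady"}, {"type": "PodScheduled"},
			},
			// Only changed conditions are sent; the server merges them by "type".
			"conditions": []map[string]any{
				{"type": "Ready", "status": "True", "lastTransitionTime": "2025-12-11T10:18:08Z"},
			},
		},
	}
	out, err := json.MarshalIndent(patch, "", "  ")
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out))
}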
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:14Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.754257 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-qv7jl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb85226c32cc03f4667a27f4596ce501400b323c88b347baab7574fe8f9e056c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4c2nm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly
\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-qv7jl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:14Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.767827 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"82107b1a-b827-4c4a-b133-ce7e8ed73963\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf6437a10b018ecd7f7c1a23eb664377818d86ae958710d078a245878cd7d5a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52c5a9fef9792d964d1b4505a0e5857652454e088fbc8293524f644afd044507\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed406469f2fa49f8100c4c2c4adf80a173b1e5c8ab8581a4dc8c4d93fd557c7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-m
anager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b28b0a79fd6ef3045b5417a53b9e665faea306044a08b64362f5f573221dea8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:14Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.785984 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8fc3491f01b999c50e166f30aef0a1c9d95bc6935a4b68cdac9bfbb70bce980\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:14Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.798723 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:14Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.814886 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:14Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.830045 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4g9zj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27da1f1a-7a92-4c81-9889-21b44ea62db0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcf1553f9e95b87867fdf90f869863968036e47ec536a526cae63020cc9119fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef7bb22e51b47e9e49687153801d9a61be88291a5c4ae94f0d6c779c1eb779c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef7bb22e51b47e9e49687153801d9a61be88291a5c4ae94f0d6c779c1eb779c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28edb9dba08d7e8cccab85d649f75ce2c946ff8be7bb2de39e02432156e2ccf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28edb9dba08d7e8cccab85d649f75ce2c946ff8be7bb2de39e02432156e2ccf8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4g9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:14Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.835614 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.835645 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.835656 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.835670 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.835679 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:14Z","lastTransitionTime":"2025-12-11T10:18:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.840575 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8lh4c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0724e8ea-646f-422e-bc21-39189bddba17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14a19c708bdceab221701a9e228b35336aa133a021ee7a088800e9bec694e175\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4cn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8lh4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:14Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.937796 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.937848 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.937859 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.937874 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:14 crc kubenswrapper[4663]: I1211 10:18:14.937882 4663 setters.go:603] "Node became not 
ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:14Z","lastTransitionTime":"2025-12-11T10:18:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.040233 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.040294 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.040305 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.040319 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.040328 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:15Z","lastTransitionTime":"2025-12-11T10:18:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.142221 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.142269 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.142281 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.142306 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.142318 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:15Z","lastTransitionTime":"2025-12-11T10:18:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.245278 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.245540 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.245548 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.245561 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.245570 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:15Z","lastTransitionTime":"2025-12-11T10:18:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.347982 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.348013 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.348021 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.348033 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.348042 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:15Z","lastTransitionTime":"2025-12-11T10:18:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.450347 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.450383 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.450391 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.450403 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.450412 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:15Z","lastTransitionTime":"2025-12-11T10:18:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.504268 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:18:15 crc kubenswrapper[4663]: E1211 10:18:15.504418 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.552975 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.553020 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.553029 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.553042 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.553051 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:15Z","lastTransitionTime":"2025-12-11T10:18:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.655522 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.655558 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.655568 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.655581 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.655591 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:15Z","lastTransitionTime":"2025-12-11T10:18:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.662210 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-z7882_4f739f0f-4546-4db5-809d-435823288bd8/ovnkube-controller/0.log" Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.664555 4663 generic.go:334] "Generic (PLEG): container finished" podID="4f739f0f-4546-4db5-809d-435823288bd8" containerID="3becd628b56db394b1c90e56b11dd31accc93eb44c3d73b230c24f12c7a1cd02" exitCode=1 Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.664596 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" event={"ID":"4f739f0f-4546-4db5-809d-435823288bd8","Type":"ContainerDied","Data":"3becd628b56db394b1c90e56b11dd31accc93eb44c3d73b230c24f12c7a1cd02"} Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.665143 4663 scope.go:117] "RemoveContainer" containerID="3becd628b56db394b1c90e56b11dd31accc93eb44c3d73b230c24f12c7a1cd02" Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.676103 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b934597e-ac43-4d21-82b1-c8d4260800e6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba7f98a60c12531c6766887ffac909316494552cc1acb036c5609e6400ac5b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d9bf7e6f64790700d3765c1935fa7eb13fa839a96617117bb226a8b86090fe9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83cef2539e49e0251b7cdab06b7a9002344f7ee41150f84b3f648372b4d50b55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a89c5b395c04b94c4476db7bf1edcfe175fb552dbbf4145a7031c0f86e139e9f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0068e2981a8c781e9563c1714bf0be7f35b5510f1fd7e52ab4cc83a0c0ec0c0e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"le observer\\\\nW1211 10:18:06.233120 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1211 10:18:06.233258 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 10:18:06.234798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2924789290/tls.crt::/tmp/serving-cert-2924789290/tls.key\\\\\\\"\\\\nI1211 10:18:06.570830 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 10:18:06.573023 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 10:18:06.573049 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 10:18:06.573076 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 10:18:06.573081 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 10:18:06.580393 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1211 10:18:06.580415 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580420 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580424 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 10:18:06.580426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 10:18:06.580429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 10:18:06.580432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1211 10:18:06.580522 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1211 10:18:06.581348 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1a0dcfabc032353060f8cb94fc8c601da6b20a3e2c35b84229bf66c9256537\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:15Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.685757 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9e980e59ce0e172d37122fdae971fb0123940795827553f646ae1a6283f523\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:15Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.701351 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f739f0f-4546-4db5-809d-435823288bd8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84fd59d33b64d1211bfe2754377534ed00ca078356eeb674fcd5d2d25761500b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3becd628b56db394b1c90e56b11dd31accc93eb4
4c3d73b230c24f12c7a1cd02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3becd628b56db394b1c90e56b11dd31accc93eb44c3d73b230c24f12c7a1cd02\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T10:18:15Z\\\",\\\"message\\\":\\\"sip/v1/apis/informers/externalversions/factory.go:140\\\\nI1211 10:18:15.310864 5951 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1211 10:18:15.310936 5951 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1211 10:18:15.310942 5951 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1211 10:18:15.310953 5951 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1211 10:18:15.310956 5951 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1211 10:18:15.310972 5951 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1211 10:18:15.311000 5951 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1211 10:18:15.311012 5951 factory.go:656] Stopping watch factory\\\\nI1211 10:18:15.311026 5951 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1211 10:18:15.311080 5951 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1211 10:18:15.311088 5951 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1211 10:18:15.311093 5951 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1211 10:18:15.311098 5951 handler.go:208] Removed *v1.Node event handler 2\\\\nI1211 10:18:15.311119 5951 handler.go:208] Removed *v1.Node event handler 7\\\\nI1211 10:18:15.311126 5951 handler.go:208] Removed *v1.EgressFirewall 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-z7882\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:15Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.713103 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dc8119f8882cfe2688a3b38b4746f6f9f75764080053fae1f19edd7880464e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de08980167134ace33e7d6a626c4ee66e733e247e6e8d0f7e969105c12f41580\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:15Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.723046 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:15Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.731927 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8f5qw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7cf8637-c767-47eb-8d38-578340067566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3440496709e41014f785c5aadeba1bae72c4b733779039f7508fa2ca78144537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-646rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8f5qw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:15Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.740758 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98941020-2764-490d-81db-9decd21aaa13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a72fbeafe047b24c15eb96a00eacf7ff119486159cd9cf32f63af409aecc529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed2238a7f3178a2c325e0f77a94934af6c907c13d0340e7f4c67b744669b56b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mq524\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:15Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.750490 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-qv7jl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb85226c32cc03f4667a27f4596ce501400b323c88b347baab7574fe8f9e056c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/ser
viceaccount\\\",\\\"name\\\":\\\"kube-api-access-4c2nm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-qv7jl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:15Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.758497 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.758532 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.758541 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.758556 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.758566 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:15Z","lastTransitionTime":"2025-12-11T10:18:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.760515 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"82107b1a-b827-4c4a-b133-ce7e8ed73963\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf6437a10b018ecd7f7c1a23eb664377818d86ae958710d078a245878cd7d5a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52c5a9fef9792d964d1b4505a0e5857652454e088fbc8293524f644afd044507\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed406469f2fa49f8100c4c2c4adf80a173b1e5c8ab8581a4dc8c4d93fd557c7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b28b0a79fd6ef3045b5417a53b9e665faea306044a08b64362f5f573221dea8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:15Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.771617 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8fc3491f01b999c50e166f30aef0a1c9d95bc6935a4b68cdac9bfbb70bce980\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for 
pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:15Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.780742 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:15Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.790575 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:15Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.801211 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4g9zj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27da1f1a-7a92-4c81-9889-21b44ea62db0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcf1553f9e95b87867fdf90f869863968036e47ec536a526cae63020cc9119fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef7bb22e51b47e9e49687153801d9a61be88291a5c4ae94f0d6c779c1eb779c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef7bb22e51b47e9e49687153801d9a61be88291a5c4ae94f0d6c779c1eb779c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28edb9dba08d7e8cccab85d649f75ce2c946ff8be7bb2de39e02432156e2ccf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28edb9dba08d7e8cccab85d649f75ce2c946ff8be7bb2de39e02432156e2ccf8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4g9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:15Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.808924 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8lh4c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0724e8ea-646f-422e-bc21-39189bddba17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14a19c708bdceab221701a9e228b35336aa133a021ee7a088800e9bec694e175\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4cn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8lh4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:15Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 
10:18:15.861274 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.861320 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.861329 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.861347 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.861356 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:15Z","lastTransitionTime":"2025-12-11T10:18:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.964209 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.964244 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.964259 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.964274 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:15 crc kubenswrapper[4663]: I1211 10:18:15.964284 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:15Z","lastTransitionTime":"2025-12-11T10:18:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.066317 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.066352 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.066360 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.066375 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.066384 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:16Z","lastTransitionTime":"2025-12-11T10:18:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.169178 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.169268 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.169279 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.169291 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.169302 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:16Z","lastTransitionTime":"2025-12-11T10:18:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.271208 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.271250 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.271260 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.271272 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.271281 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:16Z","lastTransitionTime":"2025-12-11T10:18:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.373835 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.373882 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.373892 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.373914 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.373925 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:16Z","lastTransitionTime":"2025-12-11T10:18:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.476109 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.476157 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.476166 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.476205 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.476215 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:16Z","lastTransitionTime":"2025-12-11T10:18:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.504660 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.504682 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 11 10:18:16 crc kubenswrapper[4663]: E1211 10:18:16.504800 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 11 10:18:16 crc kubenswrapper[4663]: E1211 10:18:16.504925 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.577936 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.577971 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.577980 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.577997 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.578007 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:16Z","lastTransitionTime":"2025-12-11T10:18:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.671880 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-z7882_4f739f0f-4546-4db5-809d-435823288bd8/ovnkube-controller/1.log"
Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.672792 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-z7882_4f739f0f-4546-4db5-809d-435823288bd8/ovnkube-controller/0.log"
Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.676690 4663 generic.go:334] "Generic (PLEG): container finished" podID="4f739f0f-4546-4db5-809d-435823288bd8" containerID="97cb732945f473415113069fc8c737be33f8e78f7f7dc1a11ef6c7678ec3f3fd" exitCode=1
Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.676738 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" event={"ID":"4f739f0f-4546-4db5-809d-435823288bd8","Type":"ContainerDied","Data":"97cb732945f473415113069fc8c737be33f8e78f7f7dc1a11ef6c7678ec3f3fd"}
Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.676799 4663 scope.go:117] "RemoveContainer" containerID="3becd628b56db394b1c90e56b11dd31accc93eb44c3d73b230c24f12c7a1cd02"
Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.677386 4663 scope.go:117] "RemoveContainer" containerID="97cb732945f473415113069fc8c737be33f8e78f7f7dc1a11ef6c7678ec3f3fd"
Dec 11 10:18:16 crc kubenswrapper[4663]: E1211 10:18:16.677543 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-z7882_openshift-ovn-kubernetes(4f739f0f-4546-4db5-809d-435823288bd8)\"" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" podUID="4f739f0f-4546-4db5-809d-435823288bd8"
Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.680318 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.680354 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.680366
4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.680380 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.680389 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:16Z","lastTransitionTime":"2025-12-11T10:18:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.688072 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"82107b1a-b827-4c4a-b133-ce7e8ed73963\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf6437a10b018ecd7f7c1a23eb664377818d86ae958710d078a245878cd7d5a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52c5a9fef9792d964d1b4505a0e5857652454e088fbc8293524f644afd044507\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed406469f2fa49f810
0c4c2c4adf80a173b1e5c8ab8581a4dc8c4d93fd557c7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b28b0a79fd6ef3045b5417a53b9e665faea306044a08b64362f5f573221dea8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:16Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.698705 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8fc3491f01b999c50e166f30aef0a1c9d95bc6935a4b68cdac9bfbb70bce980\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:16Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.709314 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:16Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.719614 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:16Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.729210 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4g9zj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27da1f1a-7a92-4c81-9889-21b44ea62db0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcf1553f9e95b87867fdf90f869863968036e47ec536a526cae63020cc9119fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef7bb22e51b47e9e49687153801d9a61be88291a5c4ae94f0d6c779c1eb779c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef7bb22e51b47e9e49687153801d9a61be88291a5c4ae94f0d6c779c1eb779c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28edb9dba08d7e8cccab85d649f75ce2c946ff8be7bb2de39e02432156e2ccf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28edb9dba08d7e8cccab85d649f75ce2c946ff8be7bb2de39e02432156e2ccf8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4g9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:16Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.736884 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8lh4c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0724e8ea-646f-422e-bc21-39189bddba17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14a19c708bdceab221701a9e228b35336aa133a021ee7a088800e9bec694e175\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4cn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8lh4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:16Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 
10:18:16.747137 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b934597e-ac43-4d21-82b1-c8d4260800e6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba7f98a60c12531c6766887ffac909316494552cc1acb036c5609e6400ac5b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d9bf7e6f64790700d3765c1935fa7eb13fa839a96617117bb226a8b86090fe9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83cef2539e49e0251b7cdab06b7a9002344f7ee41150f84b3f648372b4d50b55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\
\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a89c5b395c04b94c4476db7bf1edcfe175fb552dbbf4145a7031c0f86e139e9f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0068e2981a8c781e9563c1714bf0be7f35b5510f1fd7e52ab4cc83a0c0ec0c0e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"le observer\\\\nW1211 10:18:06.233120 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1211 10:18:06.233258 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 10:18:06.234798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2924789290/tls.crt::/tmp/serving-cert-2924789290/tls.key\\\\\\\"\\\\nI1211 10:18:06.570830 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 10:18:06.573023 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 10:18:06.573049 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 10:18:06.573076 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 10:18:06.573081 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 10:18:06.580393 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1211 10:18:06.580415 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580420 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580424 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 10:18:06.580426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 10:18:06.580429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 10:18:06.580432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1211 10:18:06.580522 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1211 10:18:06.581348 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1a0dcfabc032353060f8cb94fc8c601da6b20a3e2c35b84229bf66c9256537\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:16Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.755752 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9e980e59ce0e172d37122fdae971fb0123940795827553f646ae1a6283f523\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:16Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.769229 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f739f0f-4546-4db5-809d-435823288bd8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84fd59d33b64d1211bfe2754377534ed00ca078356eeb674fcd5d2d25761500b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97cb732945f473415113069fc8c737be33f8e78f
7f7dc1a11ef6c7678ec3f3fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3becd628b56db394b1c90e56b11dd31accc93eb44c3d73b230c24f12c7a1cd02\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T10:18:15Z\\\",\\\"message\\\":\\\"sip/v1/apis/informers/externalversions/factory.go:140\\\\nI1211 10:18:15.310864 5951 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1211 10:18:15.310936 5951 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1211 10:18:15.310942 5951 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1211 10:18:15.310953 5951 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1211 10:18:15.310956 5951 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1211 10:18:15.310972 5951 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1211 10:18:15.311000 5951 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1211 10:18:15.311012 5951 factory.go:656] Stopping watch factory\\\\nI1211 10:18:15.311026 5951 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1211 10:18:15.311080 5951 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1211 10:18:15.311088 5951 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1211 10:18:15.311093 5951 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1211 10:18:15.311098 5951 handler.go:208] Removed *v1.Node event handler 2\\\\nI1211 10:18:15.311119 5951 handler.go:208] Removed *v1.Node event handler 7\\\\nI1211 10:18:15.311126 5951 handler.go:208] Removed *v1.EgressFirewall ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:13Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97cb732945f473415113069fc8c737be33f8e78f7f7dc1a11ef6c7678ec3f3fd\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T10:18:16Z\\\",\\\"message\\\":\\\" policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:16Z is after 2025-08-24T17:21:41Z]\\\\nI1211 10:18:16.289121 6076 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-kube-controller-manager/kube-controller-manager_TCP_cluster\\\\\\\", UUID:\\\\\\\"ba175bbe-5cc4-47e6-a32d-57693e1320bd\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", 
\\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-kube-controller-manager/kube-controller-manager\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, b\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-
access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-z7882\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:16Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.778886 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dc8119f8882cfe2688a3b38b4746f6f9f75764080053fae1f19edd7880464e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de08980167134ace33e7d6a626c4ee66e733e247e6e8d0f7e969105c12f41580\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:16Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.783201 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.783251 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.783268 4663 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.783294 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.783312 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:16Z","lastTransitionTime":"2025-12-11T10:18:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.790481 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:16Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.798677 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8f5qw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7cf8637-c767-47eb-8d38-578340067566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3440496709e41014f785c5aadeba1bae72c4b733779039f7508fa2ca78144537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-646rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8f5qw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:16Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.808646 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98941020-2764-490d-81db-9decd21aaa13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a72fbeafe047b24c15eb96a00eacf7ff119486159cd9cf32f63af409aecc529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed2238a7f3178a2c325e0f77a94934af6c907c13d0340e7f4c67b744669b56b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mq524\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:16Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.818579 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-qv7jl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb85226c32cc03f4667a27f4596ce501400b323c88b347baab7574fe8f9e056c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/ser
viceaccount\\\",\\\"name\\\":\\\"kube-api-access-4c2nm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-qv7jl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:16Z is after 2025-08-24T17:21:41Z"
Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.889790 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.889851 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.889865 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.889995 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.890082 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:16Z","lastTransitionTime":"2025-12-11T10:18:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.992965 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.993005 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.993014 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.993036 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:16 crc kubenswrapper[4663]: I1211 10:18:16.993046 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:16Z","lastTransitionTime":"2025-12-11T10:18:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.095725 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.095779 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.095790 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.095810 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.095819 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:17Z","lastTransitionTime":"2025-12-11T10:18:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.198036 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.198075 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.198084 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.198101 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.198110 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:17Z","lastTransitionTime":"2025-12-11T10:18:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.300223 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.300363 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.300429 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.300495 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.300558 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:17Z","lastTransitionTime":"2025-12-11T10:18:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.402355 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.402413 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.402428 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.402447 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.402456 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:17Z","lastTransitionTime":"2025-12-11T10:18:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.503787 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 11 10:18:17 crc kubenswrapper[4663]: E1211 10:18:17.503952 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.504795 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.504842 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.504853 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.504868 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.504877 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:17Z","lastTransitionTime":"2025-12-11T10:18:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.607094 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.607411 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.607478 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.607541 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.607596 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:17Z","lastTransitionTime":"2025-12-11T10:18:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.682153 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-z7882_4f739f0f-4546-4db5-809d-435823288bd8/ovnkube-controller/1.log"
Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.686405 4663 scope.go:117] "RemoveContainer" containerID="97cb732945f473415113069fc8c737be33f8e78f7f7dc1a11ef6c7678ec3f3fd"
Dec 11 10:18:17 crc kubenswrapper[4663]: E1211 10:18:17.686567 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-z7882_openshift-ovn-kubernetes(4f739f0f-4546-4db5-809d-435823288bd8)\"" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" podUID="4f739f0f-4546-4db5-809d-435823288bd8"
Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.699265 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dc8119f8882cfe2688a3b38b4746f6f9f75764080053fae1f19edd7880464e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de08980167134ace33e7d6a626c4ee66e733e247e6e8d0f7e969105c12f41580\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:17Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.710924 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:17Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.712432 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.712457 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.712464 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.712476 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.712485 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:17Z","lastTransitionTime":"2025-12-11T10:18:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.722455 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8f5qw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7cf8637-c767-47eb-8d38-578340067566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3440496709e41014f785c5aadeba1bae72c4b733779039f7508fa2ca78144537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-646rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8f5qw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:17Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.730974 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"98941020-2764-490d-81db-9decd21aaa13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a72fbeafe047b24c15eb96a00eacf7ff119486159cd9cf32f63af409aecc529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed2238a7f3178a2c325e0f77a94934af6c907c13d0340e7f4c67b744669b56b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mq524\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:17Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.740792 4663 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-qv7jl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb85226c32cc03f4667a27f4596ce501400b323c88b347baab7574fe8f9e056c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4c2nm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-qv7jl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:17Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.747938 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8lh4c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0724e8ea-646f-422e-bc21-39189bddba17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14a19c708bdceab221701a9e228b35336aa133a021ee7a088800e9bec694e175\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4cn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8lh4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:17Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.757054 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"82107b1a-b827-4c4a-b133-ce7e8ed73963\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf6437a10b018ecd7f7c1a23eb664377818d86ae958710d078a245878cd7d5a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52c5a9fef9792d964d1b4505a0e5857652454e088fbc8293524f644afd044507\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed406469f2fa49f8100c4c2c4adf80a173b1e5c8ab8581a4dc8c4d93fd557c7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b28b0a79fd6ef3045b5417a53b9e665faea306044a08b64362f5f573221dea8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:17Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.768328 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8fc3491f01b999c50e166f30aef0a1c9d95bc6935a4b68cdac9bfbb70bce980\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:17Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.777110 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:17Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.787934 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:17Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.798557 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4g9zj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27da1f1a-7a92-4c81-9889-21b44ea62db0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcf1553f9e95b87867fdf90f869863968036e47ec536a526cae63020cc9119fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"container
ID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMo
unts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef7bb22e51b47e9e49687153801d9a61be88291a5c4ae94f0d6c779c1eb779c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef7bb22e51b47e9e49687153801d9a61be88291a5c4ae94f0d6c779c1eb779c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28edb9dba08d7e8cccab85d649f75ce2c946ff8be7bb2de39e02432156e2ccf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true
,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28edb9dba08d7e8cccab85d649f75ce2c946ff8be7bb2de39e02432156e2ccf8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4g9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:17Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.808334 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b934597e-ac43-4d21-82b1-c8d4260800e6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba7f98a60c12531c6766887ffac909316494552cc1acb036c5609e6400ac5b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d9bf7e6f64790700d3765c1935fa7eb13fa839a96617117bb226a8b86090fe9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83cef2539e49e0251b7cdab06b7a9002344f7ee41150f84b3f648372b4d50b55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a89c5b395c04b94c4476db7bf1edcfe175fb552dbbf4145a7031c0f86e139e9f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0068e2981a8c781e9563c1714bf0be7f35b5510f1fd7e52ab4cc83a0c0ec0c0e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"le observer\\\\nW1211 10:18:06.233120 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1211 10:18:06.233258 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 10:18:06.234798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2924789290/tls.crt::/tmp/serving-cert-2924789290/tls.key\\\\\\\"\\\\nI1211 10:18:06.570830 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 10:18:06.573023 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 10:18:06.573049 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 10:18:06.573076 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 10:18:06.573081 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 10:18:06.580393 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1211 10:18:06.580415 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580420 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580424 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 10:18:06.580426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 10:18:06.580429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 10:18:06.580432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1211 10:18:06.580522 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1211 10:18:06.581348 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1a0dcfabc032353060f8cb94fc8c601da6b20a3e2c35b84229bf66c9256537\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:17Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.814669 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.814704 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.814713 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.814733 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.814743 4663 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:17Z","lastTransitionTime":"2025-12-11T10:18:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.817150 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9e980e59ce0e172d37122fdae971fb0123940795827553f646ae1a6283f523\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:17Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.830019 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f739f0f-4546-4db5-809d-435823288bd8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84fd59d33b64d1211bfe2754377534ed00ca078356eeb674fcd5d2d25761500b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97cb732945f473415113069fc8c737be33f8e78f7f7dc1a11ef6c7678ec3f3fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97cb732945f473415113069fc8c737be33f8e78f7f7dc1a11ef6c7678ec3f3fd\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T10:18:16Z\\\",\\\"message\\\":\\\" policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:16Z is after 2025-08-24T17:21:41Z]\\\\nI1211 10:18:16.289121 6076 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-kube-controller-manager/kube-controller-manager_TCP_cluster\\\\\\\", UUID:\\\\\\\"ba175bbe-5cc4-47e6-a32d-57693e1320bd\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-kube-controller-manager/kube-controller-manager\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, b\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:15Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-z7882_openshift-ovn-kubernetes(4f739f0f-4546-4db5-809d-435823288bd8)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-z7882\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:17Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.918137 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.918176 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.918207 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.918221 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:17 crc kubenswrapper[4663]: I1211 10:18:17.918229 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:17Z","lastTransitionTime":"2025-12-11T10:18:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:18 crc kubenswrapper[4663]: I1211 10:18:18.021298 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:18 crc kubenswrapper[4663]: I1211 10:18:18.021356 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:18 crc kubenswrapper[4663]: I1211 10:18:18.021365 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:18 crc kubenswrapper[4663]: I1211 10:18:18.021380 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:18 crc kubenswrapper[4663]: I1211 10:18:18.021389 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:18Z","lastTransitionTime":"2025-12-11T10:18:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:18 crc kubenswrapper[4663]: I1211 10:18:18.123906 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:18 crc kubenswrapper[4663]: I1211 10:18:18.123958 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:18 crc kubenswrapper[4663]: I1211 10:18:18.123968 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:18 crc kubenswrapper[4663]: I1211 10:18:18.123999 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:18 crc kubenswrapper[4663]: I1211 10:18:18.124009 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:18Z","lastTransitionTime":"2025-12-11T10:18:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:18 crc kubenswrapper[4663]: I1211 10:18:18.226967 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:18 crc kubenswrapper[4663]: I1211 10:18:18.227030 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:18 crc kubenswrapper[4663]: I1211 10:18:18.227041 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:18 crc kubenswrapper[4663]: I1211 10:18:18.227059 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:18 crc kubenswrapper[4663]: I1211 10:18:18.227069 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:18Z","lastTransitionTime":"2025-12-11T10:18:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:18 crc kubenswrapper[4663]: I1211 10:18:18.329461 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:18 crc kubenswrapper[4663]: I1211 10:18:18.329510 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:18 crc kubenswrapper[4663]: I1211 10:18:18.329522 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:18 crc kubenswrapper[4663]: I1211 10:18:18.329537 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:18 crc kubenswrapper[4663]: I1211 10:18:18.329548 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:18Z","lastTransitionTime":"2025-12-11T10:18:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:18 crc kubenswrapper[4663]: I1211 10:18:18.432081 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:18 crc kubenswrapper[4663]: I1211 10:18:18.432144 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:18 crc kubenswrapper[4663]: I1211 10:18:18.432156 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:18 crc kubenswrapper[4663]: I1211 10:18:18.432222 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:18 crc kubenswrapper[4663]: I1211 10:18:18.432245 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:18Z","lastTransitionTime":"2025-12-11T10:18:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:18 crc kubenswrapper[4663]: I1211 10:18:18.504168 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:18:18 crc kubenswrapper[4663]: I1211 10:18:18.504265 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:18:18 crc kubenswrapper[4663]: E1211 10:18:18.504377 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 10:18:18 crc kubenswrapper[4663]: E1211 10:18:18.504569 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 10:18:18 crc kubenswrapper[4663]: I1211 10:18:18.535722 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:18 crc kubenswrapper[4663]: I1211 10:18:18.535770 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:18 crc kubenswrapper[4663]: I1211 10:18:18.535780 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:18 crc kubenswrapper[4663]: I1211 10:18:18.535796 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:18 crc kubenswrapper[4663]: I1211 10:18:18.535807 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:18Z","lastTransitionTime":"2025-12-11T10:18:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:18 crc kubenswrapper[4663]: I1211 10:18:18.638303 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:18 crc kubenswrapper[4663]: I1211 10:18:18.638537 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:18 crc kubenswrapper[4663]: I1211 10:18:18.638597 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:18 crc kubenswrapper[4663]: I1211 10:18:18.638659 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:18 crc kubenswrapper[4663]: I1211 10:18:18.638715 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:18Z","lastTransitionTime":"2025-12-11T10:18:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:18 crc kubenswrapper[4663]: I1211 10:18:18.741801 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:18 crc kubenswrapper[4663]: I1211 10:18:18.741847 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:18 crc kubenswrapper[4663]: I1211 10:18:18.741859 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:18 crc kubenswrapper[4663]: I1211 10:18:18.741875 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:18 crc kubenswrapper[4663]: I1211 10:18:18.741885 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:18Z","lastTransitionTime":"2025-12-11T10:18:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:18 crc kubenswrapper[4663]: I1211 10:18:18.846426 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:18 crc kubenswrapper[4663]: I1211 10:18:18.846533 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:18 crc kubenswrapper[4663]: I1211 10:18:18.846543 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:18 crc kubenswrapper[4663]: I1211 10:18:18.846567 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:18 crc kubenswrapper[4663]: I1211 10:18:18.846577 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:18Z","lastTransitionTime":"2025-12-11T10:18:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:18 crc kubenswrapper[4663]: I1211 10:18:18.949313 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:18 crc kubenswrapper[4663]: I1211 10:18:18.949367 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:18 crc kubenswrapper[4663]: I1211 10:18:18.949376 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:18 crc kubenswrapper[4663]: I1211 10:18:18.949395 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:18 crc kubenswrapper[4663]: I1211 10:18:18.949406 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:18Z","lastTransitionTime":"2025-12-11T10:18:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.052136 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.052177 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.052205 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.052218 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.052226 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:19Z","lastTransitionTime":"2025-12-11T10:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.154820 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.154876 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.154895 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.154914 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.154937 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:19Z","lastTransitionTime":"2025-12-11T10:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.257265 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.257321 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.257331 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.257346 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.257361 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:19Z","lastTransitionTime":"2025-12-11T10:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.359752 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.359826 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.359836 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.359853 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.359862 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:19Z","lastTransitionTime":"2025-12-11T10:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.463665 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.463730 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.463743 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.463771 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.463786 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:19Z","lastTransitionTime":"2025-12-11T10:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.504383 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:18:19 crc kubenswrapper[4663]: E1211 10:18:19.504556 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.516731 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dc8119f8882cfe2688a3b38b4746f6f9f75764080053fae1f19edd7880464e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de08980167134ace33e7d6a626c4ee66e733e247e6e8d0f7e969105c12f41580\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:19Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.526550 4663 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:19Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.534596 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8f5qw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7cf8637-c767-47eb-8d38-578340067566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3440496709e41014f785c5aadeba1bae72c4b733779039f7508fa2ca78144537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-646rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8f5qw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:19Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.543244 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"98941020-2764-490d-81db-9decd21aaa13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a72fbeafe047b24c15eb96a00eacf7ff119486159cd9cf32f63af409aecc529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed2238a7f3178a2c325e0f77a94934af6c907c13d0340e7f4c67b744669b56b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mq524\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:19Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.553490 4663 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-qv7jl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb85226c32cc03f4667a27f4596ce501400b323c88b347baab7574fe8f9e056c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4c2nm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-qv7jl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:19Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.563932 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"82107b1a-b827-4c4a-b133-ce7e8ed73963\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf6437a10b018ecd7f7c1a23eb664377818d86ae958710d078a245878cd7d5a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52c5a9fef9792d964d1b4505a0e5857652454e088fbc8293524f644afd044507\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed406469f2fa49f8100c4c2c4adf80a173b1e5c8ab8581a4dc8c4d93fd557c7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"k
ube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b28b0a79fd6ef3045b5417a53b9e665faea306044a08b64362f5f573221dea8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:19Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.565134 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.565198 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.565209 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.565229 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.565239 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:19Z","lastTransitionTime":"2025-12-11T10:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.578574 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8fc3491f01b999c50e166f30aef0a1c9d95bc6935a4b68cdac9bfbb70bce980\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:19Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.589101 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:19Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.598490 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:19Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.611675 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4g9zj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27da1f1a-7a92-4c81-9889-21b44ea62db0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcf1553f9e95b87867fdf90f869863968036e47ec536a526cae63020cc9119fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef7bb22e51b47e9e49687153801d9a61be88291a5c4ae94f0d6c779c1eb779c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef7bb22e51b47e9e49687153801d9a61be88291a5c4ae94f0d6c779c1eb779c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28edb9dba08d7e8cccab85d649f75ce2c946ff8be7bb2de39e02432156e2ccf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28edb9dba08d7e8cccab85d649f75ce2c946ff8be7bb2de39e02432156e2ccf8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4g9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:19Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.619593 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8lh4c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0724e8ea-646f-422e-bc21-39189bddba17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14a19c708bdceab221701a9e228b35336aa133a021ee7a088800e9bec694e175\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4cn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8lh4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:19Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 
10:18:19.629650 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b934597e-ac43-4d21-82b1-c8d4260800e6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba7f98a60c12531c6766887ffac909316494552cc1acb036c5609e6400ac5b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d9bf7e6f64790700d3765c1935fa7eb13fa839a96617117bb226a8b86090fe9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83cef2539e49e0251b7cdab06b7a9002344f7ee41150f84b3f648372b4d50b55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\
\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a89c5b395c04b94c4476db7bf1edcfe175fb552dbbf4145a7031c0f86e139e9f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0068e2981a8c781e9563c1714bf0be7f35b5510f1fd7e52ab4cc83a0c0ec0c0e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"le observer\\\\nW1211 10:18:06.233120 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1211 10:18:06.233258 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 10:18:06.234798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2924789290/tls.crt::/tmp/serving-cert-2924789290/tls.key\\\\\\\"\\\\nI1211 10:18:06.570830 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 10:18:06.573023 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 10:18:06.573049 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 10:18:06.573076 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 10:18:06.573081 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 10:18:06.580393 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1211 10:18:06.580415 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580420 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580424 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 10:18:06.580426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 10:18:06.580429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 10:18:06.580432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1211 10:18:06.580522 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1211 10:18:06.581348 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1a0dcfabc032353060f8cb94fc8c601da6b20a3e2c35b84229bf66c9256537\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:19Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.642468 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9e980e59ce0e172d37122fdae971fb0123940795827553f646ae1a6283f523\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:19Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.658038 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f739f0f-4546-4db5-809d-435823288bd8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84fd59d33b64d1211bfe2754377534ed00ca078356eeb674fcd5d2d25761500b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97cb732945f473415113069fc8c737be33f8e78f
7f7dc1a11ef6c7678ec3f3fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97cb732945f473415113069fc8c737be33f8e78f7f7dc1a11ef6c7678ec3f3fd\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T10:18:16Z\\\",\\\"message\\\":\\\" policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:16Z is after 2025-08-24T17:21:41Z]\\\\nI1211 10:18:16.289121 6076 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-kube-controller-manager/kube-controller-manager_TCP_cluster\\\\\\\", UUID:\\\\\\\"ba175bbe-5cc4-47e6-a32d-57693e1320bd\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-kube-controller-manager/kube-controller-manager\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, b\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:15Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-z7882_openshift-ovn-kubernetes(4f739f0f-4546-4db5-809d-435823288bd8)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-z7882\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:19Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.668061 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.668145 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.668162 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.668208 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.668229 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:19Z","lastTransitionTime":"2025-12-11T10:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.716741 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6"] Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.717605 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.719199 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.719486 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.729376 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9e980e59ce0e172d37122fdae971fb0123940795827553f646ae1a6283f523\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:19Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.742298 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f739f0f-4546-4db5-809d-435823288bd8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84fd59d33b64d1211bfe2754377534ed00ca078356eeb674fcd5d2d25761500b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97cb732945f473415113069fc8c737be33f8e78f7f7dc1a11ef6c7678ec3f3fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97cb732945f473415113069fc8c737be33f8e78f7f7dc1a11ef6c7678ec3f3fd\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T10:18:16Z\\\",\\\"message\\\":\\\" policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:16Z is after 2025-08-24T17:21:41Z]\\\\nI1211 10:18:16.289121 6076 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-kube-controller-manager/kube-controller-manager_TCP_cluster\\\\\\\", UUID:\\\\\\\"ba175bbe-5cc4-47e6-a32d-57693e1320bd\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-kube-controller-manager/kube-controller-manager\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, b\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:15Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-z7882_openshift-ovn-kubernetes(4f739f0f-4546-4db5-809d-435823288bd8)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-z7882\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:19Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.751259 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11443ad0-297e-48c9-9c1e-3771584468ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-997ts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-997ts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:19Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hbks6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:19Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.761523 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b934597e-ac43-4d21-82b1-c8d4260800e6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba7f98a60c12531c6766887ffac909316494552cc1acb036c5609e6400ac5b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d9bf7e6f64790700d3765c1935fa7eb13fa839a96617117bb226a8b86090fe9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83cef2539e49e0251b7cdab06b7a9002344f7ee41150f84b3f648372b4d50b55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a89c5b395c04b94c4476db7bf1edcfe175fb552dbbf4145a7031c0f86e139e9f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0068e2981a8c781e9563c1714bf0be7f35b5510f1fd7e52ab4cc83a0c0ec0c0e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"le observer\\\\nW1211 10:18:06.233120 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1211 10:18:06.233258 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 10:18:06.234798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2924789290/tls.crt::/tmp/serving-cert-2924789290/tls.key\\\\\\\"\\\\nI1211 10:18:06.570830 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 10:18:06.573023 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 10:18:06.573049 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 10:18:06.573076 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 10:18:06.573081 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 10:18:06.580393 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1211 10:18:06.580415 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580420 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580424 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 10:18:06.580426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 10:18:06.580429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 10:18:06.580432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1211 10:18:06.580522 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1211 10:18:06.581348 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1a0dcfabc032353060f8cb94fc8c601da6b20a3e2c35b84229bf66c9256537\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:19Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.770350 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8f5qw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7cf8637-c767-47eb-8d38-578340067566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3440496709e41014f785c5aadeba1bae72c4b733779039f7508fa2ca78144537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-646rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8f5qw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:19Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.770753 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.770799 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.770811 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.770826 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.770835 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:19Z","lastTransitionTime":"2025-12-11T10:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.778743 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98941020-2764-490d-81db-9decd21aaa13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a72fbeafe047b24c15eb96a00eacf7ff119486159cd9cf32f63af409aecc529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed2238a7f3178a2c325e0f77a94934af6c907c13d0340e7f4c67b744669b56b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mq524\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:19Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.787378 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-qv7jl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb85226c32cc03f4667a27f4596ce501400b323c88b347baab7574fe8f9e056c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4c2nm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly
\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-qv7jl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:19Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.798028 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dc8119f8882cfe2688a3b38b4746f6f9f75764080053fae1f19edd7880464e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de08980167134ace33e7d6a626c4ee66e733e247e6e8d0f7e969105c12f41580\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" 
for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:19Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.806326 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/11443ad0-297e-48c9-9c1e-3771584468ab-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-hbks6\" (UID: \"11443ad0-297e-48c9-9c1e-3771584468ab\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.806378 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/11443ad0-297e-48c9-9c1e-3771584468ab-env-overrides\") pod \"ovnkube-control-plane-749d76644c-hbks6\" (UID: \"11443ad0-297e-48c9-9c1e-3771584468ab\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.806413 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/11443ad0-297e-48c9-9c1e-3771584468ab-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-hbks6\" (UID: \"11443ad0-297e-48c9-9c1e-3771584468ab\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.806517 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-997ts\" (UniqueName: \"kubernetes.io/projected/11443ad0-297e-48c9-9c1e-3771584468ab-kube-api-access-997ts\") pod \"ovnkube-control-plane-749d76644c-hbks6\" (UID: \"11443ad0-297e-48c9-9c1e-3771584468ab\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.808328 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:19Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.818492 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:19Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.829051 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:19Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.840249 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4g9zj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27da1f1a-7a92-4c81-9889-21b44ea62db0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcf1553f9e95b87867fdf90f869863968036e47ec536a526cae63020cc9119fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef7bb22e51b47e9e49687153801d9a61be88291a5c4ae94f0d6c779c1eb779c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef7bb22e51b47e9e49687153801d9a61be88291a5c4ae94f0d6c779c1eb779c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28edb9dba08d7e8cccab85d649f75ce2c946ff8be7bb2de39e02432156e2ccf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28edb9dba08d7e8cccab85d649f75ce2c946ff8be7bb2de39e02432156e2ccf8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4g9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:19Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.849265 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.849297 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.849306 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.849320 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.849329 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:19Z","lastTransitionTime":"2025-12-11T10:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.849420 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8lh4c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0724e8ea-646f-422e-bc21-39189bddba17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14a19c708bdceab221701a9e228b35336aa133a021ee7a088800e9bec694e175\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4cn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8lh4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:19Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:19 crc kubenswrapper[4663]: E1211 10:18:19.858569 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient 
memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\
\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\
":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a09a2675-5943-46f1-b4c2-67ff05e820a6\\\",\\\"systemUUID\\\":\\\"18e040fd-bd85-4c98-8f53-60800a512169\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:19Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.859721 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"82107b1a-b827-4c4a-b133-ce7e8ed73963\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf6437a10b018ecd7f7c1a23eb664377818d86ae958710d078a245878cd7d5a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52c5a9fef9792d964d1b4505a0e5857652454e088fbc8293524f644afd044507\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed406469f2fa49f8100c4c2c4adf80a173b1e5c8ab8581a4dc8c4d93fd557c7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster
-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b28b0a79fd6ef3045b5417a53b9e665faea306044a08b64362f5f573221dea8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:19Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.861665 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.861705 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.861715 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.861730 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.861739 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:19Z","lastTransitionTime":"2025-12-11T10:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.871033 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8fc3491f01b999c50e166f30aef0a1c9d95bc6935a4b68cdac9bfbb70bce980\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:19Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:19 crc kubenswrapper[4663]: E1211 10:18:19.871176 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a09a2675-5943-46f1-b4c2-67ff05e820a6\\\",\\\"systemUUID\\\":\\\"1
8e040fd-bd85-4c98-8f53-60800a512169\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:19Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.874325 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.874360 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.874369 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.874385 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.874394 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:19Z","lastTransitionTime":"2025-12-11T10:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:19 crc kubenswrapper[4663]: E1211 10:18:19.884050 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a09a2675-5943-46f1-b4c2-67ff05e820a6\\\",\\\"systemUUID\\\":\\\"18e040fd-bd85-4c98-8f53-60800a512169\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:19Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.887074 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.887105 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.887115 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.887126 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.887134 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:19Z","lastTransitionTime":"2025-12-11T10:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:19 crc kubenswrapper[4663]: E1211 10:18:19.896587 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a09a2675-5943-46f1-b4c2-67ff05e820a6\\\",\\\"systemUUID\\\":\\\"18e040fd-bd85-4c98-8f53-60800a512169\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:19Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.899671 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.899705 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.899714 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.899727 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.899735 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:19Z","lastTransitionTime":"2025-12-11T10:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.908054 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/11443ad0-297e-48c9-9c1e-3771584468ab-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-hbks6\" (UID: \"11443ad0-297e-48c9-9c1e-3771584468ab\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.908103 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-997ts\" (UniqueName: \"kubernetes.io/projected/11443ad0-297e-48c9-9c1e-3771584468ab-kube-api-access-997ts\") pod \"ovnkube-control-plane-749d76644c-hbks6\" (UID: \"11443ad0-297e-48c9-9c1e-3771584468ab\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.908156 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/11443ad0-297e-48c9-9c1e-3771584468ab-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-hbks6\" (UID: \"11443ad0-297e-48c9-9c1e-3771584468ab\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.908207 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/11443ad0-297e-48c9-9c1e-3771584468ab-env-overrides\") pod \"ovnkube-control-plane-749d76644c-hbks6\" (UID: \"11443ad0-297e-48c9-9c1e-3771584468ab\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.908746 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/11443ad0-297e-48c9-9c1e-3771584468ab-env-overrides\") pod \"ovnkube-control-plane-749d76644c-hbks6\" (UID: \"11443ad0-297e-48c9-9c1e-3771584468ab\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.908802 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/11443ad0-297e-48c9-9c1e-3771584468ab-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-hbks6\" (UID: \"11443ad0-297e-48c9-9c1e-3771584468ab\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6" Dec 11 10:18:19 crc kubenswrapper[4663]: E1211 10:18:19.908853 4663 kubelet_node_status.go:585] "Error updating node 
status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537f
cfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":50
7459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\
\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a09a2675-5943-46f1-b4c2-67ff05e820a6\\\",\\\"systemUUID\\\":\\\"18e040fd-bd85-4c98-8f53-60800a512169\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:19Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:19 crc kubenswrapper[4663]: E1211 10:18:19.908978 4663 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.910536 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.910650 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.910675 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.910691 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.910713 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:19Z","lastTransitionTime":"2025-12-11T10:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.916012 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/11443ad0-297e-48c9-9c1e-3771584468ab-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-hbks6\" (UID: \"11443ad0-297e-48c9-9c1e-3771584468ab\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6" Dec 11 10:18:19 crc kubenswrapper[4663]: I1211 10:18:19.923052 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-997ts\" (UniqueName: \"kubernetes.io/projected/11443ad0-297e-48c9-9c1e-3771584468ab-kube-api-access-997ts\") pod \"ovnkube-control-plane-749d76644c-hbks6\" (UID: \"11443ad0-297e-48c9-9c1e-3771584468ab\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.013516 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.013568 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.013580 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.013596 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.013607 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:20Z","lastTransitionTime":"2025-12-11T10:18:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.028803 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.116174 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.116235 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.116245 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.116262 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.116272 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:20Z","lastTransitionTime":"2025-12-11T10:18:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.218837 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.218892 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.218902 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.218920 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.218930 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:20Z","lastTransitionTime":"2025-12-11T10:18:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.321278 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.321339 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.321351 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.321380 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.321396 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:20Z","lastTransitionTime":"2025-12-11T10:18:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.422207 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-5r5q6"] Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.422726 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:18:20 crc kubenswrapper[4663]: E1211 10:18:20.422795 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-5r5q6" podUID="54c64914-8d10-48bc-a755-1f7257400a32" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.423718 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.423743 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.423751 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.423762 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.423772 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:20Z","lastTransitionTime":"2025-12-11T10:18:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.432367 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dc8119f8882cfe2688a3b38b4746f6f9f75764080053fae1f19edd7880464e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de08980167134ace33e7d6a626c4ee66e733e247e6e8d0f7e969105c12f41580\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name
\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:20Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.447420 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:20Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.460133 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8f5qw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7cf8637-c767-47eb-8d38-578340067566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3440496709e41014f785c5aadeba1bae72c4b733779039f7508fa2ca78144537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-646rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8f5qw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:20Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.476669 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98941020-2764-490d-81db-9decd21aaa13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a72fbeafe047b24c15eb96a00eacf7ff119486159cd9cf32f63af409aecc529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed2238a7f3178a2c325e0f77a94934af6c907c13d0340e7f4c67b744669b56b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mq524\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:20Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.488990 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-qv7jl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb85226c32cc03f4667a27f4596ce501400b323c88b347baab7574fe8f9e056c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/ser
viceaccount\\\",\\\"name\\\":\\\"kube-api-access-4c2nm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-qv7jl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:20Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.497141 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5r5q6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"54c64914-8d10-48bc-a755-1f7257400a32\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8wsjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8wsjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:20Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5r5q6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:20Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.504638 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.504666 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:18:20 crc kubenswrapper[4663]: E1211 10:18:20.504742 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 10:18:20 crc kubenswrapper[4663]: E1211 10:18:20.504887 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.508563 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"82107b1a-b827-4c4a-b133-ce7e8ed73963\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf6437a10b018ecd7f7c1a23eb664377818d86ae958710d078a245878cd7d5a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52c5a9fef9792d964d1b4505a0e5857652454e088fbc8293524f644afd044507\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed406469f2fa49f8100c4c2c4adf80a173b1e5c8ab8581a4dc8c4d93fd557c7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/st
atic-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b28b0a79fd6ef3045b5417a53b9e665faea306044a08b64362f5f573221dea8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:20Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.515121 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/54c64914-8d10-48bc-a755-1f7257400a32-metrics-certs\") pod \"network-metrics-daemon-5r5q6\" (UID: \"54c64914-8d10-48bc-a755-1f7257400a32\") " pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.515167 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8wsjw\" (UniqueName: \"kubernetes.io/projected/54c64914-8d10-48bc-a755-1f7257400a32-kube-api-access-8wsjw\") pod \"network-metrics-daemon-5r5q6\" (UID: \"54c64914-8d10-48bc-a755-1f7257400a32\") " pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.518221 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8fc3491f01b999c50e166f30aef0a1c9d95bc6935a4b68cdac9bfbb70bce980\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:20Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.526171 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.526220 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.526231 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.526244 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.526255 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:20Z","lastTransitionTime":"2025-12-11T10:18:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.526658 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:20Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.534905 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:20Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.544558 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4g9zj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27da1f1a-7a92-4c81-9889-21b44ea62db0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcf1553f9e95b87867fdf90f869863968036e47ec536a526cae63020cc9119fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"container
ID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMo
unts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef7bb22e51b47e9e49687153801d9a61be88291a5c4ae94f0d6c779c1eb779c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef7bb22e51b47e9e49687153801d9a61be88291a5c4ae94f0d6c779c1eb779c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28edb9dba08d7e8cccab85d649f75ce2c946ff8be7bb2de39e02432156e2ccf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true
,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28edb9dba08d7e8cccab85d649f75ce2c946ff8be7bb2de39e02432156e2ccf8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4g9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:20Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.552512 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8lh4c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0724e8ea-646f-422e-bc21-39189bddba17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14a19c708bdceab221701a9e228b35336aa133a021ee7a088800e9bec694e175\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4cn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.1
1\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8lh4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:20Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.563133 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b934597e-ac43-4d21-82b1-c8d4260800e6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba7f98a60c12531c6766887ffac909316494552cc1acb036c5609e6400ac5b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d9bf7e6f64790700d3765c1935fa7eb13fa839a96617117bb226a8b86090fe9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83cef2539e49e0251b7cdab06b7a9002344f7ee41150f84b3f648372b4d50b55\\\",\\\"image\\\":\\\"
quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a89c5b395c04b94c4476db7bf1edcfe175fb552dbbf4145a7031c0f86e139e9f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0068e2981a8c781e9563c1714bf0be7f35b5510f1fd7e52ab4cc83a0c0ec0c0e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"le observer\\\\nW1211 10:18:06.233120 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1211 10:18:06.233258 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 10:18:06.234798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2924789290/tls.crt::/tmp/serving-cert-2924789290/tls.key\\\\\\\"\\\\nI1211 10:18:06.570830 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 10:18:06.573023 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 10:18:06.573049 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 10:18:06.573076 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 10:18:06.573081 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 10:18:06.580393 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1211 10:18:06.580415 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580420 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580424 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 10:18:06.580426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 10:18:06.580429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 10:18:06.580432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1211 10:18:06.580522 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1211 10:18:06.581348 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1a0dcfabc032353060f8cb94fc8c601da6b20a3e2c35b84229bf66c9256537\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:20Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.571827 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9e980e59ce0e172d37122fdae971fb0123940795827553f646ae1a6283f523\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:20Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.585516 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f739f0f-4546-4db5-809d-435823288bd8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84fd59d33b64d1211bfe2754377534ed00ca078356eeb674fcd5d2d25761500b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97cb732945f473415113069fc8c737be33f8e78f
7f7dc1a11ef6c7678ec3f3fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97cb732945f473415113069fc8c737be33f8e78f7f7dc1a11ef6c7678ec3f3fd\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T10:18:16Z\\\",\\\"message\\\":\\\" policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:16Z is after 2025-08-24T17:21:41Z]\\\\nI1211 10:18:16.289121 6076 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-kube-controller-manager/kube-controller-manager_TCP_cluster\\\\\\\", UUID:\\\\\\\"ba175bbe-5cc4-47e6-a32d-57693e1320bd\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-kube-controller-manager/kube-controller-manager\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, b\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:15Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-z7882_openshift-ovn-kubernetes(4f739f0f-4546-4db5-809d-435823288bd8)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-z7882\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:20Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.595418 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11443ad0-297e-48c9-9c1e-3771584468ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-997ts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-997ts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:19Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hbks6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:20Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.616031 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/54c64914-8d10-48bc-a755-1f7257400a32-metrics-certs\") pod \"network-metrics-daemon-5r5q6\" (UID: \"54c64914-8d10-48bc-a755-1f7257400a32\") " pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.616093 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8wsjw\" (UniqueName: \"kubernetes.io/projected/54c64914-8d10-48bc-a755-1f7257400a32-kube-api-access-8wsjw\") pod \"network-metrics-daemon-5r5q6\" (UID: \"54c64914-8d10-48bc-a755-1f7257400a32\") " pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:18:20 crc kubenswrapper[4663]: E1211 10:18:20.616228 4663 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 11 10:18:20 crc kubenswrapper[4663]: E1211 10:18:20.616300 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/54c64914-8d10-48bc-a755-1f7257400a32-metrics-certs podName:54c64914-8d10-48bc-a755-1f7257400a32 nodeName:}" failed. 
No retries permitted until 2025-12-11 10:18:21.116279582 +0000 UTC m=+31.934209367 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/54c64914-8d10-48bc-a755-1f7257400a32-metrics-certs") pod "network-metrics-daemon-5r5q6" (UID: "54c64914-8d10-48bc-a755-1f7257400a32") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.628268 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.628314 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.628325 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.628346 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.628358 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:20Z","lastTransitionTime":"2025-12-11T10:18:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.629910 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8wsjw\" (UniqueName: \"kubernetes.io/projected/54c64914-8d10-48bc-a755-1f7257400a32-kube-api-access-8wsjw\") pod \"network-metrics-daemon-5r5q6\" (UID: \"54c64914-8d10-48bc-a755-1f7257400a32\") " pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.698426 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6" event={"ID":"11443ad0-297e-48c9-9c1e-3771584468ab","Type":"ContainerStarted","Data":"d93647b4b0d3f4391ca6b565072c59f3f32be1433b5a043e3bb93395d529ebc3"} Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.698499 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6" event={"ID":"11443ad0-297e-48c9-9c1e-3771584468ab","Type":"ContainerStarted","Data":"6c487fbed4984533659f3e1f3a79783d12b4e95706a529dc9cd24abb62adda34"} Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.698511 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6" event={"ID":"11443ad0-297e-48c9-9c1e-3771584468ab","Type":"ContainerStarted","Data":"a4267eceef513c60abccc9c346605ae7affde71a8ed1ea1814566f024b7d0679"} Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.709120 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"11443ad0-297e-48c9-9c1e-3771584468ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c487fbed4984533659f3e1f3a79783d12b4e95706a529dc9cd24abb62adda34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-997ts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d93647b4b0d3f4391ca6b565072c59f3f32be1433b5a043e3bb93395d529ebc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-997ts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:19Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hbks6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:20Z is after 2025-08-24T17:21:41Z" Dec 11 
10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.721683 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b934597e-ac43-4d21-82b1-c8d4260800e6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba7f98a60c12531c6766887ffac909316494552cc1acb036c5609e6400ac5b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d9bf7e6f64790700d3765c1935fa7eb13fa839a96617117bb226a8b86090fe9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83cef2539e49e0251b7cdab06b7a9002344f7ee41150f84b3f648372b4d50b55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\
":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a89c5b395c04b94c4476db7bf1edcfe175fb552dbbf4145a7031c0f86e139e9f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0068e2981a8c781e9563c1714bf0be7f35b5510f1fd7e52ab4cc83a0c0ec0c0e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"le observer\\\\nW1211 10:18:06.233120 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1211 10:18:06.233258 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 10:18:06.234798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2924789290/tls.crt::/tmp/serving-cert-2924789290/tls.key\\\\\\\"\\\\nI1211 10:18:06.570830 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 10:18:06.573023 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 10:18:06.573049 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 10:18:06.573076 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 10:18:06.573081 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 10:18:06.580393 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1211 10:18:06.580415 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580420 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580424 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 10:18:06.580426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 10:18:06.580429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 10:18:06.580432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1211 10:18:06.580522 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1211 10:18:06.581348 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1a0dcfabc032353060f8cb94fc8c601da6b20a3e2c35b84229bf66c9256537\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:20Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.730097 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.730140 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.730149 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.730168 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.730194 4663 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:20Z","lastTransitionTime":"2025-12-11T10:18:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.731788 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9e980e59ce0e172d37122fdae971fb0123940795827553f646ae1a6283f523\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:20Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.745834 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f739f0f-4546-4db5-809d-435823288bd8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84fd59d33b64d1211bfe2754377534ed00ca078356eeb674fcd5d2d25761500b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97cb732945f473415113069fc8c737be33f8e78f7f7dc1a11ef6c7678ec3f3fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97cb732945f473415113069fc8c737be33f8e78f7f7dc1a11ef6c7678ec3f3fd\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T10:18:16Z\\\",\\\"message\\\":\\\" policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:16Z is after 2025-08-24T17:21:41Z]\\\\nI1211 10:18:16.289121 6076 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-kube-controller-manager/kube-controller-manager_TCP_cluster\\\\\\\", UUID:\\\\\\\"ba175bbe-5cc4-47e6-a32d-57693e1320bd\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-kube-controller-manager/kube-controller-manager\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, b\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:15Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-z7882_openshift-ovn-kubernetes(4f739f0f-4546-4db5-809d-435823288bd8)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-z7882\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:20Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.755391 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-qv7jl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb85226c32cc03f4667a27f4596ce501400b323c88b347baab7574fe8f9e056c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-
cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4c2nm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-qv7jl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:20Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.762817 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5r5q6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"54c64914-8d10-48bc-a755-1f7257400a32\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8wsjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8wsjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:20Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5r5q6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:20Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.771252 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dc8119f8882cfe2688a3b38b4746f6f9f75764080053fae1f19edd7880464e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de08980167134ace33e7d6a626c4ee66e733e247e6e8d0f7e969105c12f41580\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:20Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.779799 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:20Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.792728 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8f5qw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7cf8637-c767-47eb-8d38-578340067566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3440496709e41014f785c5aadeba1bae72c4b733779039f7508fa2ca78144537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-646rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8f5qw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:20Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.801418 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"98941020-2764-490d-81db-9decd21aaa13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a72fbeafe047b24c15eb96a00eacf7ff119486159cd9cf32f63af409aecc529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed2238a7f3178a2c325e0f77a94934af6c907c13d0340e7f4c67b744669b56b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mq524\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:20Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.811952 4663 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4g9zj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27da1f1a-7a92-4c81-9889-21b44ea62db0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcf1553f9e95b87867fdf90f869863968036e47ec536a526cae63020cc9119fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef7bb22e51b47e9e49687153801d9a61be88291a5c4ae94f0d6c779c1eb779c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef7bb22e51b47e9e49687153801d9a61be88291a5c4ae94f0d6c779c1eb779c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28edb9dba08d7e8cccab85d649f75ce2c946ff8be7bb2de39e02432156e2ccf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28edb9dba08d7e8cccab85d649f75ce2c946ff8be7bb2de39e02432156e2ccf8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4g9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:20Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.819172 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8lh4c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0724e8ea-646f-422e-bc21-39189bddba17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14a19c708bdceab221701a9e228b35336aa133a021ee7a088800e9bec694e175\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4cn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8lh4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:20Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.828451 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"82107b1a-b827-4c4a-b133-ce7e8ed73963\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf6437a10b018ecd7f7c1a23eb664377818d86ae958710d078a245878cd7d5a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52c5a9fef9792d964d1b4505a0e5857652454e088fbc8293524f644afd044507\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed406469f2fa49f8100c4c2c4adf80a173b1e5c8ab8581a4dc8c4d93fd557c7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b28b0a79fd6ef3045b5417a53b9e665faea306044a08b64362f5f573221dea8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:20Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.831975 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.832081 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.832154 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.832309 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.832373 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:20Z","lastTransitionTime":"2025-12-11T10:18:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.838343 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8fc3491f01b999c50e166f30aef0a1c9d95bc6935a4b68cdac9bfbb70bce980\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:20Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.847342 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:20Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.860472 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:20Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.934818 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.934868 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.934881 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.934896 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:20 crc kubenswrapper[4663]: I1211 10:18:20.934906 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:20Z","lastTransitionTime":"2025-12-11T10:18:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:21 crc kubenswrapper[4663]: I1211 10:18:21.037471 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:21 crc kubenswrapper[4663]: I1211 10:18:21.037524 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:21 crc kubenswrapper[4663]: I1211 10:18:21.037535 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:21 crc kubenswrapper[4663]: I1211 10:18:21.037549 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:21 crc kubenswrapper[4663]: I1211 10:18:21.037560 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:21Z","lastTransitionTime":"2025-12-11T10:18:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:21 crc kubenswrapper[4663]: I1211 10:18:21.121572 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/54c64914-8d10-48bc-a755-1f7257400a32-metrics-certs\") pod \"network-metrics-daemon-5r5q6\" (UID: \"54c64914-8d10-48bc-a755-1f7257400a32\") " pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:18:21 crc kubenswrapper[4663]: E1211 10:18:21.121738 4663 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 11 10:18:21 crc kubenswrapper[4663]: E1211 10:18:21.121800 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/54c64914-8d10-48bc-a755-1f7257400a32-metrics-certs podName:54c64914-8d10-48bc-a755-1f7257400a32 nodeName:}" failed. No retries permitted until 2025-12-11 10:18:22.121784809 +0000 UTC m=+32.939714593 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/54c64914-8d10-48bc-a755-1f7257400a32-metrics-certs") pod "network-metrics-daemon-5r5q6" (UID: "54c64914-8d10-48bc-a755-1f7257400a32") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 11 10:18:21 crc kubenswrapper[4663]: I1211 10:18:21.140287 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:21 crc kubenswrapper[4663]: I1211 10:18:21.140336 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:21 crc kubenswrapper[4663]: I1211 10:18:21.140346 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:21 crc kubenswrapper[4663]: I1211 10:18:21.140359 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:21 crc kubenswrapper[4663]: I1211 10:18:21.140367 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:21Z","lastTransitionTime":"2025-12-11T10:18:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:21 crc kubenswrapper[4663]: I1211 10:18:21.242660 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:21 crc kubenswrapper[4663]: I1211 10:18:21.242698 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:21 crc kubenswrapper[4663]: I1211 10:18:21.242707 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:21 crc kubenswrapper[4663]: I1211 10:18:21.242720 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:21 crc kubenswrapper[4663]: I1211 10:18:21.242729 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:21Z","lastTransitionTime":"2025-12-11T10:18:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:21 crc kubenswrapper[4663]: I1211 10:18:21.345309 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:21 crc kubenswrapper[4663]: I1211 10:18:21.345345 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:21 crc kubenswrapper[4663]: I1211 10:18:21.345355 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:21 crc kubenswrapper[4663]: I1211 10:18:21.345368 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:21 crc kubenswrapper[4663]: I1211 10:18:21.345376 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:21Z","lastTransitionTime":"2025-12-11T10:18:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:21 crc kubenswrapper[4663]: I1211 10:18:21.447689 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:21 crc kubenswrapper[4663]: I1211 10:18:21.447718 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:21 crc kubenswrapper[4663]: I1211 10:18:21.447725 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:21 crc kubenswrapper[4663]: I1211 10:18:21.447736 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:21 crc kubenswrapper[4663]: I1211 10:18:21.447743 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:21Z","lastTransitionTime":"2025-12-11T10:18:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:21 crc kubenswrapper[4663]: I1211 10:18:21.504538 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:18:21 crc kubenswrapper[4663]: I1211 10:18:21.504550 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:18:21 crc kubenswrapper[4663]: E1211 10:18:21.504832 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5r5q6" podUID="54c64914-8d10-48bc-a755-1f7257400a32" Dec 11 10:18:21 crc kubenswrapper[4663]: E1211 10:18:21.504959 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 10:18:21 crc kubenswrapper[4663]: I1211 10:18:21.550519 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:21 crc kubenswrapper[4663]: I1211 10:18:21.550548 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:21 crc kubenswrapper[4663]: I1211 10:18:21.550558 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:21 crc kubenswrapper[4663]: I1211 10:18:21.550571 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:21 crc kubenswrapper[4663]: I1211 10:18:21.550581 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:21Z","lastTransitionTime":"2025-12-11T10:18:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:21 crc kubenswrapper[4663]: I1211 10:18:21.652743 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:21 crc kubenswrapper[4663]: I1211 10:18:21.652807 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:21 crc kubenswrapper[4663]: I1211 10:18:21.652853 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:21 crc kubenswrapper[4663]: I1211 10:18:21.652869 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:21 crc kubenswrapper[4663]: I1211 10:18:21.652879 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:21Z","lastTransitionTime":"2025-12-11T10:18:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:21 crc kubenswrapper[4663]: I1211 10:18:21.755026 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:21 crc kubenswrapper[4663]: I1211 10:18:21.755058 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:21 crc kubenswrapper[4663]: I1211 10:18:21.755067 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:21 crc kubenswrapper[4663]: I1211 10:18:21.755148 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:21 crc kubenswrapper[4663]: I1211 10:18:21.755160 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:21Z","lastTransitionTime":"2025-12-11T10:18:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:21 crc kubenswrapper[4663]: I1211 10:18:21.857523 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:21 crc kubenswrapper[4663]: I1211 10:18:21.857744 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:21 crc kubenswrapper[4663]: I1211 10:18:21.857753 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:21 crc kubenswrapper[4663]: I1211 10:18:21.857767 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:21 crc kubenswrapper[4663]: I1211 10:18:21.857777 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:21Z","lastTransitionTime":"2025-12-11T10:18:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:21 crc kubenswrapper[4663]: I1211 10:18:21.959769 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:21 crc kubenswrapper[4663]: I1211 10:18:21.959834 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:21 crc kubenswrapper[4663]: I1211 10:18:21.959843 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:21 crc kubenswrapper[4663]: I1211 10:18:21.959857 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:21 crc kubenswrapper[4663]: I1211 10:18:21.959867 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:21Z","lastTransitionTime":"2025-12-11T10:18:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:22 crc kubenswrapper[4663]: I1211 10:18:22.062292 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:22 crc kubenswrapper[4663]: I1211 10:18:22.062348 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:22 crc kubenswrapper[4663]: I1211 10:18:22.062358 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:22 crc kubenswrapper[4663]: I1211 10:18:22.062369 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:22 crc kubenswrapper[4663]: I1211 10:18:22.062379 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:22Z","lastTransitionTime":"2025-12-11T10:18:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:22 crc kubenswrapper[4663]: I1211 10:18:22.130906 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/54c64914-8d10-48bc-a755-1f7257400a32-metrics-certs\") pod \"network-metrics-daemon-5r5q6\" (UID: \"54c64914-8d10-48bc-a755-1f7257400a32\") " pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:18:22 crc kubenswrapper[4663]: E1211 10:18:22.131032 4663 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 11 10:18:22 crc kubenswrapper[4663]: E1211 10:18:22.131085 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/54c64914-8d10-48bc-a755-1f7257400a32-metrics-certs podName:54c64914-8d10-48bc-a755-1f7257400a32 nodeName:}" failed. No retries permitted until 2025-12-11 10:18:24.131073254 +0000 UTC m=+34.949003038 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/54c64914-8d10-48bc-a755-1f7257400a32-metrics-certs") pod "network-metrics-daemon-5r5q6" (UID: "54c64914-8d10-48bc-a755-1f7257400a32") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 11 10:18:22 crc kubenswrapper[4663]: I1211 10:18:22.164198 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:22 crc kubenswrapper[4663]: I1211 10:18:22.164230 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:22 crc kubenswrapper[4663]: I1211 10:18:22.164239 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:22 crc kubenswrapper[4663]: I1211 10:18:22.164253 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:22 crc kubenswrapper[4663]: I1211 10:18:22.164262 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:22Z","lastTransitionTime":"2025-12-11T10:18:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:22 crc kubenswrapper[4663]: I1211 10:18:22.266998 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:22 crc kubenswrapper[4663]: I1211 10:18:22.267048 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:22 crc kubenswrapper[4663]: I1211 10:18:22.267058 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:22 crc kubenswrapper[4663]: I1211 10:18:22.267076 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:22 crc kubenswrapper[4663]: I1211 10:18:22.267084 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:22Z","lastTransitionTime":"2025-12-11T10:18:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:22 crc kubenswrapper[4663]: I1211 10:18:22.332771 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 10:18:22 crc kubenswrapper[4663]: I1211 10:18:22.332896 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:18:22 crc kubenswrapper[4663]: I1211 10:18:22.332920 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:18:22 crc kubenswrapper[4663]: E1211 10:18:22.332982 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 10:18:38.332956296 +0000 UTC m=+49.150886080 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 10:18:22 crc kubenswrapper[4663]: E1211 10:18:22.333028 4663 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 10:18:22 crc kubenswrapper[4663]: I1211 10:18:22.333080 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:18:22 crc kubenswrapper[4663]: E1211 10:18:22.333092 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 10:18:38.333078319 +0000 UTC m=+49.151008103 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 10:18:22 crc kubenswrapper[4663]: E1211 10:18:22.333256 4663 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 10:18:22 crc kubenswrapper[4663]: E1211 10:18:22.333312 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 10:18:38.333304274 +0000 UTC m=+49.151234058 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 10:18:22 crc kubenswrapper[4663]: E1211 10:18:22.333667 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 10:18:22 crc kubenswrapper[4663]: E1211 10:18:22.333711 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 10:18:22 crc kubenswrapper[4663]: E1211 10:18:22.333737 4663 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 10:18:22 crc kubenswrapper[4663]: E1211 10:18:22.333817 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-11 10:18:38.333805082 +0000 UTC m=+49.151734867 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 10:18:22 crc kubenswrapper[4663]: I1211 10:18:22.368794 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:22 crc kubenswrapper[4663]: I1211 10:18:22.368825 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:22 crc kubenswrapper[4663]: I1211 10:18:22.368833 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:22 crc kubenswrapper[4663]: I1211 10:18:22.368845 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:22 crc kubenswrapper[4663]: I1211 10:18:22.368853 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:22Z","lastTransitionTime":"2025-12-11T10:18:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:22 crc kubenswrapper[4663]: I1211 10:18:22.433822 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:18:22 crc kubenswrapper[4663]: E1211 10:18:22.433959 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 10:18:22 crc kubenswrapper[4663]: E1211 10:18:22.433975 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 10:18:22 crc kubenswrapper[4663]: E1211 10:18:22.433985 4663 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 10:18:22 crc kubenswrapper[4663]: E1211 10:18:22.434027 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-11 10:18:38.434016084 +0000 UTC m=+49.251945867 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 10:18:22 crc kubenswrapper[4663]: I1211 10:18:22.470905 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:22 crc kubenswrapper[4663]: I1211 10:18:22.470932 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:22 crc kubenswrapper[4663]: I1211 10:18:22.470941 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:22 crc kubenswrapper[4663]: I1211 10:18:22.470955 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:22 crc kubenswrapper[4663]: I1211 10:18:22.470964 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:22Z","lastTransitionTime":"2025-12-11T10:18:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:22 crc kubenswrapper[4663]: I1211 10:18:22.504430 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:18:22 crc kubenswrapper[4663]: I1211 10:18:22.504443 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:18:22 crc kubenswrapper[4663]: E1211 10:18:22.504536 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 10:18:22 crc kubenswrapper[4663]: E1211 10:18:22.504970 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 10:18:22 crc kubenswrapper[4663]: I1211 10:18:22.573719 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:22 crc kubenswrapper[4663]: I1211 10:18:22.573753 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:22 crc kubenswrapper[4663]: I1211 10:18:22.573762 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:22 crc kubenswrapper[4663]: I1211 10:18:22.573788 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:22 crc kubenswrapper[4663]: I1211 10:18:22.573797 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:22Z","lastTransitionTime":"2025-12-11T10:18:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:22 crc kubenswrapper[4663]: I1211 10:18:22.676121 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:22 crc kubenswrapper[4663]: I1211 10:18:22.676163 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:22 crc kubenswrapper[4663]: I1211 10:18:22.676179 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:22 crc kubenswrapper[4663]: I1211 10:18:22.676210 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:22 crc kubenswrapper[4663]: I1211 10:18:22.676221 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:22Z","lastTransitionTime":"2025-12-11T10:18:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 11 10:18:22 crc kubenswrapper[4663]: I1211 10:18:22.779085 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:22 crc kubenswrapper[4663]: I1211 10:18:22.779125 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:22 crc kubenswrapper[4663]: I1211 10:18:22.779134 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:22 crc kubenswrapper[4663]: I1211 10:18:22.779150 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:22 crc kubenswrapper[4663]: I1211 10:18:22.779159 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:22Z","lastTransitionTime":"2025-12-11T10:18:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
[the same five-entry NodeNotReady stanza repeats roughly every 100 ms at 10:18:22.881, 10:18:22.984, 10:18:23.088, 10:18:23.190, 10:18:23.292, 10:18:23.394 and 10:18:23.497]
Dec 11 10:18:23 crc kubenswrapper[4663]: I1211 10:18:23.503994 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 11 10:18:23 crc kubenswrapper[4663]: I1211 10:18:23.504032 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5r5q6"
Dec 11 10:18:23 crc kubenswrapper[4663]: E1211 10:18:23.504117 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 11 10:18:23 crc kubenswrapper[4663]: E1211 10:18:23.504217 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5r5q6" podUID="54c64914-8d10-48bc-a755-1f7257400a32"
[the stanza repeats at 10:18:23.599, 10:18:23.701, 10:18:23.803, 10:18:23.905, 10:18:24.007 and 10:18:24.110]
Dec 11 10:18:24 crc kubenswrapper[4663]: I1211 10:18:24.151843 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/54c64914-8d10-48bc-a755-1f7257400a32-metrics-certs\") pod \"network-metrics-daemon-5r5q6\" (UID: \"54c64914-8d10-48bc-a755-1f7257400a32\") " pod="openshift-multus/network-metrics-daemon-5r5q6"
Dec 11 10:18:24 crc kubenswrapper[4663]: E1211 10:18:24.151992 4663 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 11 10:18:24 crc kubenswrapper[4663]: E1211 10:18:24.152055 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/54c64914-8d10-48bc-a755-1f7257400a32-metrics-certs podName:54c64914-8d10-48bc-a755-1f7257400a32 nodeName:}" failed. No retries permitted until 2025-12-11 10:18:28.152039778 +0000 UTC m=+38.969969572 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/54c64914-8d10-48bc-a755-1f7257400a32-metrics-certs") pod "network-metrics-daemon-5r5q6" (UID: "54c64914-8d10-48bc-a755-1f7257400a32") : object "openshift-multus"/"metrics-daemon-secret" not registered
[the stanza repeats at 10:18:24.212, 10:18:24.314 and 10:18:24.416]
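Context for the stanza above: the kubelet keeps the node's Ready condition False until the container runtime reports NetworkReady, and that report fails for as long as no CNI network config exists in /etc/kubernetes/cni/net.d/. A minimal standalone sketch of that directory check follows; the path comes straight from the log message, while the scan-for-extensions logic is an assumption modeled on libcni's usual .conf/.conflist/.json lookup, not the kubelet's exact code.

// cnicheck.go - sketch of the "is a CNI config present?" test that keeps
// failing in the log above (assumptions noted in the lead-in).
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

const cniConfDir = "/etc/kubernetes/cni/net.d" // directory named in the log

// hasCNIConfig reports whether dir contains any CNI network configuration
// file (.conf, .conflist, or .json).
func hasCNIConfig(dir string) (bool, error) {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return false, err // a missing directory also means "no config"
	}
	for _, e := range entries {
		if e.IsDir() {
			continue
		}
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			return true, nil
		}
	}
	return false, nil
}

func main() {
	if ok, err := hasCNIConfig(cniConfDir); err != nil || !ok {
		// Same condition the kubelet keeps publishing: NetworkReady=false.
		fmt.Printf("network not ready: no CNI configuration file in %s (err=%v)\n", cniConfDir, err)
		return
	}
	fmt.Println("NetworkReady=true")
}

The failed metrics-certs mount above is retried on a similar wait-and-retry pattern: the kubelet's nested pending operations back off between attempts (roughly doubling the delay), which is why this attempt reports durationBeforeRetry 4s and schedules the next try for 10:18:28.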
Dec 11 10:18:24 crc kubenswrapper[4663]: I1211 10:18:24.429364 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 11 10:18:24 crc kubenswrapper[4663]: I1211 10:18:24.441862 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b934597e-ac43-4d21-82b1-c8d4260800e6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba7f98a60c12531c6766887ffac909316494552cc1acb036c5609e6400ac5b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d9bf7e6f64790700d3765c1935fa7eb13fa839a96617117bb226a8b86090fe9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-op
erator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83cef2539e49e0251b7cdab06b7a9002344f7ee41150f84b3f648372b4d50b55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a89c5b395c04b94c4476db7bf1edcfe175fb552dbbf4145a7031c0f86e139e9f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0068e2981a8c781e9563c1714bf0be7f35b5510f1fd7e52ab4cc83a0c0ec0c0e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"le observer\\\\nW1211 10:18:06.233120 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1211 10:18:06.233258 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 10:18:06.234798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2924789290/tls.crt::/tmp/serving-cert-2924789290/tls.key\\\\\\\"\\\\nI1211 10:18:06.570830 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 10:18:06.573023 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 10:18:06.573049 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 10:18:06.573076 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 10:18:06.573081 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 10:18:06.580393 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1211 10:18:06.580415 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580420 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580424 1 
secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 10:18:06.580426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 10:18:06.580429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 10:18:06.580432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1211 10:18:06.580522 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1211 10:18:06.581348 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1a0dcfabc032353060f8cb94fc8c601da6b20a3e2c35b84229bf66c9256537\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:24Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:24 crc kubenswrapper[4663]: I1211 10:18:24.453084 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed 
to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9e980e59ce0e172d37122fdae971fb0123940795827553f646ae1a6283f523\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:24Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:24 crc kubenswrapper[4663]: I1211 10:18:24.469805 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f739f0f-4546-4db5-809d-435823288bd8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84fd59d33b64d1211bfe2754377534ed00ca078356eeb674fcd5d2d25761500b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97cb732945f473415113069fc8c737be33f8e78f
7f7dc1a11ef6c7678ec3f3fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97cb732945f473415113069fc8c737be33f8e78f7f7dc1a11ef6c7678ec3f3fd\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T10:18:16Z\\\",\\\"message\\\":\\\" policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:16Z is after 2025-08-24T17:21:41Z]\\\\nI1211 10:18:16.289121 6076 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-kube-controller-manager/kube-controller-manager_TCP_cluster\\\\\\\", UUID:\\\\\\\"ba175bbe-5cc4-47e6-a32d-57693e1320bd\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-kube-controller-manager/kube-controller-manager\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, b\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:15Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-z7882_openshift-ovn-kubernetes(4f739f0f-4546-4db5-809d-435823288bd8)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-z7882\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:24Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:24 crc kubenswrapper[4663]: I1211 10:18:24.477726 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11443ad0-297e-48c9-9c1e-3771584468ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c487fbed4984533659f3e1f3a79783d12b4e95706a529dc9cd24abb62adda34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-997ts
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d93647b4b0d3f4391ca6b565072c59f3f32be1433b5a043e3bb93395d529ebc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-997ts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:19Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hbks6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:24Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:24 crc kubenswrapper[4663]: I1211 10:18:24.485710 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dc8119f8882cfe2688a3b38b4746f6f9f75764080053fae1f19edd7880464e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de08980167134ace33e7d6a626c4ee66e733e247e6e8d0f7e969105c12f41580\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:24Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:24 crc kubenswrapper[4663]: I1211 10:18:24.493871 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:24Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:24 crc kubenswrapper[4663]: I1211 10:18:24.500369 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8f5qw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7cf8637-c767-47eb-8d38-578340067566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3440496709e41014f785c5aadeba1bae72c4b733779039f7508fa2ca78144537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-646rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8f5qw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:24Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:24 crc kubenswrapper[4663]: I1211 10:18:24.504614 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:18:24 crc kubenswrapper[4663]: E1211 10:18:24.504767 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 10:18:24 crc kubenswrapper[4663]: I1211 10:18:24.504673 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:18:24 crc kubenswrapper[4663]: E1211 10:18:24.504948 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 10:18:24 crc kubenswrapper[4663]: I1211 10:18:24.508451 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98941020-2764-490d-81db-9decd21aaa13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a72fbeafe047b24c15eb96a00eacf7ff119486159cd9cf32f63af409aecc529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed2238a7f3178a2c325e0f77a94934af6c907c13d0340e7f4c67b744669b56b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"re
cursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mq524\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:24Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:24 crc kubenswrapper[4663]: I1211 10:18:24.516629 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-qv7jl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb85226c32cc03f4667a27f4596ce501400b323c88b347baab7574fe8f9e056c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4c2nm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-qv7jl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:24Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:24 crc kubenswrapper[4663]: I1211 10:18:24.519057 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:24 crc kubenswrapper[4663]: I1211 10:18:24.519088 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:24 crc kubenswrapper[4663]: I1211 10:18:24.519096 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:24 crc kubenswrapper[4663]: I1211 10:18:24.519108 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:24 crc kubenswrapper[4663]: I1211 10:18:24.519116 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:24Z","lastTransitionTime":"2025-12-11T10:18:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:24 crc kubenswrapper[4663]: I1211 10:18:24.523913 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5r5q6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"54c64914-8d10-48bc-a755-1f7257400a32\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8wsjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8wsjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:20Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5r5q6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:24Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:24 crc kubenswrapper[4663]: I1211 10:18:24.531578 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"82107b1a-b827-4c4a-b133-ce7e8ed73963\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf6437a10b018ecd7f7c1a23eb664377818d86ae958710d078a245878cd7d5a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52c5a9fef9792d964d1b4505a0e5857652454e088fbc8293524f644afd044507\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed406469f2fa49f8100c4c2c4adf80a173b1e5c8ab8581a4dc8c4d93fd557c7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b28b0a79fd6ef3045b5417a53b9e665faea306044a08b64362f5f573221dea8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:24Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:24 crc kubenswrapper[4663]: I1211 10:18:24.539613 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8fc3491f01b999c50e166f30aef0a1c9d95bc6935a4b68cdac9bfbb70bce980\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:24Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:24 crc kubenswrapper[4663]: I1211 10:18:24.547029 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:24Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:24 crc kubenswrapper[4663]: I1211 10:18:24.554727 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:24Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:24 crc kubenswrapper[4663]: I1211 10:18:24.564860 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4g9zj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27da1f1a-7a92-4c81-9889-21b44ea62db0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcf1553f9e95b87867fdf90f869863968036e47ec536a526cae63020cc9119fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"container
ID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMo
unts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef7bb22e51b47e9e49687153801d9a61be88291a5c4ae94f0d6c779c1eb779c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef7bb22e51b47e9e49687153801d9a61be88291a5c4ae94f0d6c779c1eb779c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28edb9dba08d7e8cccab85d649f75ce2c946ff8be7bb2de39e02432156e2ccf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true
,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28edb9dba08d7e8cccab85d649f75ce2c946ff8be7bb2de39e02432156e2ccf8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4g9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:24Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:24 crc kubenswrapper[4663]: I1211 10:18:24.572209 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8lh4c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0724e8ea-646f-422e-bc21-39189bddba17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14a19c708bdceab221701a9e228b35336aa133a021ee7a088800e9bec694e175\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4cn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.1
1\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8lh4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:24Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:24 crc kubenswrapper[4663]: I1211 10:18:24.620770 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:24 crc kubenswrapper[4663]: I1211 10:18:24.620800 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:24 crc kubenswrapper[4663]: I1211 10:18:24.620809 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:24 crc kubenswrapper[4663]: I1211 10:18:24.620823 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:24 crc kubenswrapper[4663]: I1211 10:18:24.620832 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:24Z","lastTransitionTime":"2025-12-11T10:18:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:24 crc kubenswrapper[4663]: I1211 10:18:24.723348 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:24 crc kubenswrapper[4663]: I1211 10:18:24.723413 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:24 crc kubenswrapper[4663]: I1211 10:18:24.723424 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:24 crc kubenswrapper[4663]: I1211 10:18:24.723446 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:24 crc kubenswrapper[4663]: I1211 10:18:24.723462 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:24Z","lastTransitionTime":"2025-12-11T10:18:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:24 crc kubenswrapper[4663]: I1211 10:18:24.825503 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:24 crc kubenswrapper[4663]: I1211 10:18:24.825568 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:24 crc kubenswrapper[4663]: I1211 10:18:24.825584 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:24 crc kubenswrapper[4663]: I1211 10:18:24.825602 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:24 crc kubenswrapper[4663]: I1211 10:18:24.825615 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:24Z","lastTransitionTime":"2025-12-11T10:18:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:24 crc kubenswrapper[4663]: I1211 10:18:24.927968 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:24 crc kubenswrapper[4663]: I1211 10:18:24.928004 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:24 crc kubenswrapper[4663]: I1211 10:18:24.928012 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:24 crc kubenswrapper[4663]: I1211 10:18:24.928026 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:24 crc kubenswrapper[4663]: I1211 10:18:24.928036 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:24Z","lastTransitionTime":"2025-12-11T10:18:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:25 crc kubenswrapper[4663]: I1211 10:18:25.030549 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:25 crc kubenswrapper[4663]: I1211 10:18:25.030585 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:25 crc kubenswrapper[4663]: I1211 10:18:25.030594 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:25 crc kubenswrapper[4663]: I1211 10:18:25.030610 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:25 crc kubenswrapper[4663]: I1211 10:18:25.030620 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:25Z","lastTransitionTime":"2025-12-11T10:18:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:25 crc kubenswrapper[4663]: I1211 10:18:25.133319 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:25 crc kubenswrapper[4663]: I1211 10:18:25.133379 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:25 crc kubenswrapper[4663]: I1211 10:18:25.133388 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:25 crc kubenswrapper[4663]: I1211 10:18:25.133402 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:25 crc kubenswrapper[4663]: I1211 10:18:25.133411 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:25Z","lastTransitionTime":"2025-12-11T10:18:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:25 crc kubenswrapper[4663]: I1211 10:18:25.235864 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:25 crc kubenswrapper[4663]: I1211 10:18:25.235904 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:25 crc kubenswrapper[4663]: I1211 10:18:25.235913 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:25 crc kubenswrapper[4663]: I1211 10:18:25.235926 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:25 crc kubenswrapper[4663]: I1211 10:18:25.235936 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:25Z","lastTransitionTime":"2025-12-11T10:18:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:25 crc kubenswrapper[4663]: I1211 10:18:25.337568 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:25 crc kubenswrapper[4663]: I1211 10:18:25.337605 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:25 crc kubenswrapper[4663]: I1211 10:18:25.337613 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:25 crc kubenswrapper[4663]: I1211 10:18:25.337626 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:25 crc kubenswrapper[4663]: I1211 10:18:25.337635 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:25Z","lastTransitionTime":"2025-12-11T10:18:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:25 crc kubenswrapper[4663]: I1211 10:18:25.440025 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:25 crc kubenswrapper[4663]: I1211 10:18:25.440074 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:25 crc kubenswrapper[4663]: I1211 10:18:25.440084 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:25 crc kubenswrapper[4663]: I1211 10:18:25.440097 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:25 crc kubenswrapper[4663]: I1211 10:18:25.440106 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:25Z","lastTransitionTime":"2025-12-11T10:18:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:25 crc kubenswrapper[4663]: I1211 10:18:25.504174 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:18:25 crc kubenswrapper[4663]: E1211 10:18:25.504301 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 10:18:25 crc kubenswrapper[4663]: I1211 10:18:25.504177 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:18:25 crc kubenswrapper[4663]: E1211 10:18:25.504452 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-5r5q6" podUID="54c64914-8d10-48bc-a755-1f7257400a32" Dec 11 10:18:25 crc kubenswrapper[4663]: I1211 10:18:25.541906 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:25 crc kubenswrapper[4663]: I1211 10:18:25.541939 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:25 crc kubenswrapper[4663]: I1211 10:18:25.541948 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:25 crc kubenswrapper[4663]: I1211 10:18:25.541959 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:25 crc kubenswrapper[4663]: I1211 10:18:25.541968 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:25Z","lastTransitionTime":"2025-12-11T10:18:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:25 crc kubenswrapper[4663]: I1211 10:18:25.644377 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:25 crc kubenswrapper[4663]: I1211 10:18:25.644401 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:25 crc kubenswrapper[4663]: I1211 10:18:25.644410 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:25 crc kubenswrapper[4663]: I1211 10:18:25.644421 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:25 crc kubenswrapper[4663]: I1211 10:18:25.644429 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:25Z","lastTransitionTime":"2025-12-11T10:18:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:25 crc kubenswrapper[4663]: I1211 10:18:25.746249 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:25 crc kubenswrapper[4663]: I1211 10:18:25.746276 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:25 crc kubenswrapper[4663]: I1211 10:18:25.746284 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:25 crc kubenswrapper[4663]: I1211 10:18:25.746293 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:25 crc kubenswrapper[4663]: I1211 10:18:25.746301 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:25Z","lastTransitionTime":"2025-12-11T10:18:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:25 crc kubenswrapper[4663]: I1211 10:18:25.848338 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:25 crc kubenswrapper[4663]: I1211 10:18:25.848364 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:25 crc kubenswrapper[4663]: I1211 10:18:25.848371 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:25 crc kubenswrapper[4663]: I1211 10:18:25.848380 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:25 crc kubenswrapper[4663]: I1211 10:18:25.848404 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:25Z","lastTransitionTime":"2025-12-11T10:18:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:25 crc kubenswrapper[4663]: I1211 10:18:25.949799 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:25 crc kubenswrapper[4663]: I1211 10:18:25.949828 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:25 crc kubenswrapper[4663]: I1211 10:18:25.949838 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:25 crc kubenswrapper[4663]: I1211 10:18:25.949848 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:25 crc kubenswrapper[4663]: I1211 10:18:25.949878 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:25Z","lastTransitionTime":"2025-12-11T10:18:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:26 crc kubenswrapper[4663]: I1211 10:18:26.051915 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:26 crc kubenswrapper[4663]: I1211 10:18:26.051949 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:26 crc kubenswrapper[4663]: I1211 10:18:26.051960 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:26 crc kubenswrapper[4663]: I1211 10:18:26.051973 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:26 crc kubenswrapper[4663]: I1211 10:18:26.051981 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:26Z","lastTransitionTime":"2025-12-11T10:18:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:26 crc kubenswrapper[4663]: I1211 10:18:26.153990 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:26 crc kubenswrapper[4663]: I1211 10:18:26.154016 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:26 crc kubenswrapper[4663]: I1211 10:18:26.154027 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:26 crc kubenswrapper[4663]: I1211 10:18:26.154040 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:26 crc kubenswrapper[4663]: I1211 10:18:26.154047 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:26Z","lastTransitionTime":"2025-12-11T10:18:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:26 crc kubenswrapper[4663]: I1211 10:18:26.256029 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:26 crc kubenswrapper[4663]: I1211 10:18:26.256068 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:26 crc kubenswrapper[4663]: I1211 10:18:26.256077 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:26 crc kubenswrapper[4663]: I1211 10:18:26.256090 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:26 crc kubenswrapper[4663]: I1211 10:18:26.256099 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:26Z","lastTransitionTime":"2025-12-11T10:18:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:26 crc kubenswrapper[4663]: I1211 10:18:26.357513 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:26 crc kubenswrapper[4663]: I1211 10:18:26.357542 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:26 crc kubenswrapper[4663]: I1211 10:18:26.357551 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:26 crc kubenswrapper[4663]: I1211 10:18:26.357562 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:26 crc kubenswrapper[4663]: I1211 10:18:26.357570 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:26Z","lastTransitionTime":"2025-12-11T10:18:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:26 crc kubenswrapper[4663]: I1211 10:18:26.460047 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:26 crc kubenswrapper[4663]: I1211 10:18:26.460089 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:26 crc kubenswrapper[4663]: I1211 10:18:26.460098 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:26 crc kubenswrapper[4663]: I1211 10:18:26.460112 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:26 crc kubenswrapper[4663]: I1211 10:18:26.460122 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:26Z","lastTransitionTime":"2025-12-11T10:18:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:26 crc kubenswrapper[4663]: I1211 10:18:26.503929 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:18:26 crc kubenswrapper[4663]: I1211 10:18:26.503935 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:18:26 crc kubenswrapper[4663]: E1211 10:18:26.504049 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 11 10:18:26 crc kubenswrapper[4663]: E1211 10:18:26.504116 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
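The root error names /etc/kubernetes/cni/net.d/: the container runtime keeps reporting NetworkReady=false until a CNI network configuration appears in that directory. A sketch of roughly what that presence check amounts to; the extension set follows the general CNI convention (.conf, .conflist, .json) and is an assumption, not CRI-O's exact implementation:

    package main

    import (
        "fmt"
        "os"
        "path/filepath"
    )

    func main() {
        dir := "/etc/kubernetes/cni/net.d" // directory named in the log message
        entries, err := os.ReadDir(dir)
        if err != nil {
            fmt.Println("cannot read conf dir:", err)
            return
        }
        found := false
        for _, e := range entries {
            switch filepath.Ext(e.Name()) {
            case ".conf", ".conflist", ".json":
                fmt.Println("CNI config:", e.Name())
                found = true
            }
        }
        if !found {
            // The state the kubelet keeps reporting above.
            fmt.Println("no CNI configuration file in", dir)
        }
    }

Here the file only appears once the ovnkube-node pod (which mounts /etc/cni/net.d as host-cni-netd, see below) writes it, which is why the condition persists while that pod is crash-looping.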
Dec 11 10:18:27 crc kubenswrapper[4663]: I1211 10:18:27.504441 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5r5q6"
Dec 11 10:18:27 crc kubenswrapper[4663]: I1211 10:18:27.504482 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 11 10:18:27 crc kubenswrapper[4663]: E1211 10:18:27.504587 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-multus/network-metrics-daemon-5r5q6" podUID="54c64914-8d10-48bc-a755-1f7257400a32"
Dec 11 10:18:27 crc kubenswrapper[4663]: E1211 10:18:27.504673 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
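The "Error syncing pod, skipping" records show the pod workers refusing to create sandboxes for pods that need the cluster network while NetworkReady=false; host-network static pods such as kube-apiserver-crc (seen running later in this log) are unaffected. A minimal sketch paraphrasing that gate; the real check lives in the kubelet's pod workers and runtime state, so treat this as an illustration, not the actual code path:

    package main

    import (
        "errors"
        "fmt"
    )

    type pod struct {
        name        string
        hostNetwork bool
    }

    // syncAllowed paraphrases the gating visible in the log: while the
    // runtime network is not ready, only host-network pods may sync.
    func syncAllowed(networkReady bool, p pod) error {
        if !networkReady && !p.hostNetwork {
            return errors.New("network is not ready: container runtime network not ready")
        }
        return nil
    }

    func main() {
        pods := []pod{
            {"openshift-multus/network-metrics-daemon-5r5q6", false},
            {"openshift-kube-apiserver/kube-apiserver-crc", true},
        }
        for _, p := range pods {
            if err := syncAllowed(false, p); err != nil {
                fmt.Printf("skip %s: %v\n", p.name, err)
            } else {
                fmt.Printf("sync %s\n", p.name)
            }
        }
    }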
Dec 11 10:18:28 crc kubenswrapper[4663]: I1211 10:18:28.187068 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/54c64914-8d10-48bc-a755-1f7257400a32-metrics-certs\") pod \"network-metrics-daemon-5r5q6\" (UID: \"54c64914-8d10-48bc-a755-1f7257400a32\") " pod="openshift-multus/network-metrics-daemon-5r5q6"
Dec 11 10:18:28 crc kubenswrapper[4663]: E1211 10:18:28.187207 4663 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 11 10:18:28 crc kubenswrapper[4663]: E1211 10:18:28.187259 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/54c64914-8d10-48bc-a755-1f7257400a32-metrics-certs podName:54c64914-8d10-48bc-a755-1f7257400a32 nodeName:}" failed. No retries permitted until 2025-12-11 10:18:36.187245215 +0000 UTC m=+47.005175000 (durationBeforeRetry 8s).
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/54c64914-8d10-48bc-a755-1f7257400a32-metrics-certs") pod "network-metrics-daemon-5r5q6" (UID: "54c64914-8d10-48bc-a755-1f7257400a32") : object "openshift-multus"/"metrics-daemon-secret" not registered
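Two mechanisms are visible here. "not registered" appears to come from the kubelet's secret manager, which serves secrets from a cache keyed by the pods registered with it, so the lookup fails until that registration catches up. The failed mount is then retried with an exponentially growing delay; the observed durationBeforeRetry of 8s is consistent with a 500ms initial delay doubled on each failure. A sketch of that doubling backoff; the 500ms start and the 2-minute cap are assumptions chosen to reproduce the observed value, not verified kubelet constants:

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        delay := 500 * time.Millisecond // assumed initial delay
        maxDelay := 2 * time.Minute     // assumed cap
        for attempt := 1; attempt <= 6; attempt++ {
            fmt.Printf("attempt %d failed; durationBeforeRetry %v\n", attempt, delay)
            delay *= 2 // double the wait after every failure
            if delay > maxDelay {
                delay = maxDelay
            }
        }
    }

Running the sketch prints 500ms, 1s, 2s, 4s, 8s, ..., matching the 8s seen on the fifth failed attempt.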
Dec 11 10:18:28 crc kubenswrapper[4663]: I1211 10:18:28.503885 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 11 10:18:28 crc kubenswrapper[4663]: E1211 10:18:28.504015 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 11 10:18:28 crc kubenswrapper[4663]: I1211 10:18:28.504076 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 11 10:18:28 crc kubenswrapper[4663]: E1211 10:18:28.504173 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.504583 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.504676 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5r5q6"
Dec 11 10:18:29 crc kubenswrapper[4663]: E1211 10:18:29.504786 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 10:18:29 crc kubenswrapper[4663]: E1211 10:18:29.504873 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5r5q6" podUID="54c64914-8d10-48bc-a755-1f7257400a32" Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.516484 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b934597e-ac43-4d21-82b1-c8d4260800e6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba7f98a60c12531c6766887ffac909316494552cc1acb036c5609e6400ac5b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d9bf7e6f64790700d3765c1935fa7eb13fa839a96617117bb226a8b86090fe9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83cef2539e49e0251b7cdab06b7a9002344f7ee41150f84b3f648372b4d50b55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1
ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a89c5b395c04b94c4476db7bf1edcfe175fb552dbbf4145a7031c0f86e139e9f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0068e2981a8c781e9563c1714bf0be7f35b5510f1fd7e52ab4cc83a0c0ec0c0e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"le observer\\\\nW1211 10:18:06.233120 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1211 10:18:06.233258 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 10:18:06.234798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2924789290/tls.crt::/tmp/serving-cert-2924789290/tls.key\\\\\\\"\\\\nI1211 10:18:06.570830 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 10:18:06.573023 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 10:18:06.573049 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 10:18:06.573076 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 10:18:06.573081 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 10:18:06.580393 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1211 10:18:06.580415 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580420 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580424 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 10:18:06.580426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 10:18:06.580429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 10:18:06.580432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1211 10:18:06.580522 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1211 10:18:06.581348 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1a0dcfabc032353060f8cb94fc8c601da6b20a3e2c35b84229bf66c9256537\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:29Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.524941 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9e980e59ce0e172d37122fdae971fb0123940795827553f646ae1a6283f523\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:29Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.531098 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.531134 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.531144 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.531157 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.531165 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:29Z","lastTransitionTime":"2025-12-11T10:18:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.539658 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f739f0f-4546-4db5-809d-435823288bd8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84fd59d33b64d1211bfe2754377534ed00ca078356eeb674fcd5d2d25761500b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97cb732945f473415113069fc8c737be33f8e78f7f7dc1a11ef6c7678ec3f3fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97cb732945f473415113069fc8c737be33f8e78f7f7dc1a11ef6c7678ec3f3fd\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T10:18:16Z\\\",\\\"message\\\":\\\" policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:16Z is after 2025-08-24T17:21:41Z]\\\\nI1211 10:18:16.289121 6076 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-kube-controller-manager/kube-controller-manager_TCP_cluster\\\\\\\", UUID:\\\\\\\"ba175bbe-5cc4-47e6-a32d-57693e1320bd\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-kube-controller-manager/kube-controller-manager\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, 
b\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:15Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-z7882_openshift-ovn-kubernetes(4f739f0f-4546-4db5-809d-435823288bd8)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveRe
adOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-z7882\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:29Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.550660 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"11443ad0-297e-48c9-9c1e-3771584468ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c487fbed4984533659f3e1f3a79783d12b4e95706a529dc9cd24abb62adda34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-997ts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d93647b4b0d3f4391ca6b565072c59f3f32be1433b5a043e3bb93395d529ebc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-997ts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:19Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hbks6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:29Z is after 2025-08-24T17:21:41Z" Dec 11 
10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.558810 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:29Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.566357 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8f5qw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7cf8637-c767-47eb-8d38-578340067566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3440496709e41014f785c5aadeba1bae72c4b733779039f7508fa2ca78144537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-646rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8f5qw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:29Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.573578 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"98941020-2764-490d-81db-9decd21aaa13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a72fbeafe047b24c15eb96a00eacf7ff119486159cd9cf32f63af409aecc529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed2238a7f3178a2c325e0f77a94934af6c907c13d0340e7f4c67b744669b56b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mq524\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:29Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.584856 4663 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-qv7jl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb85226c32cc03f4667a27f4596ce501400b323c88b347baab7574fe8f9e056c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4c2nm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-qv7jl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:29Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.592322 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5r5q6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"54c64914-8d10-48bc-a755-1f7257400a32\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8wsjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8wsjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:20Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5r5q6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: 
current time 2025-12-11T10:18:29Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.600490 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dc8119f8882cfe2688a3b38b4746f6f9f75764080053fae1f19edd7880464e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de08980167134ace33e7d6a626c4ee66e733e247e6e8d0f7e969105c12f41580\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:29Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.609261 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8fc3491f01b999c50e166f30aef0a1c9d95bc6935a4b68cdac9bfbb70bce980\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:29Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.617618 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The 
container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:29Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.625831 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:29Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.632788 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.632855 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.632870 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.632886 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.632897 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:29Z","lastTransitionTime":"2025-12-11T10:18:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.635083 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4g9zj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27da1f1a-7a92-4c81-9889-21b44ea62db0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcf1553f9e95b87867fdf90f869863968036e47ec536a526cae63020cc9119fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef7bb22e51b47e9e49687153801d9a61be88291a5c4ae94f0d6c779c1eb779c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef7bb22e51b47e9e49687153801d9a61be88291a5c4ae94f0d6c779c1eb779c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28edb9dba08d7e8cccab85d649f75ce2c946ff8be7bb2de39e02432156e2ccf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28edb9dba08d7e8cccab85d649f75ce2c946ff8be7bb2de39e02432156e2ccf8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4g9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:29Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.641472 4663 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-image-registry/node-ca-8lh4c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0724e8ea-646f-422e-bc21-39189bddba17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14a19c708bdceab221701a9e228b35336aa133a021ee7a088800e9bec694e175\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4cn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8lh4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:29Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.651464 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"82107b1a-b827-4c4a-b133-ce7e8ed73963\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf6437a10b018ecd7f7c1a23eb664377818d86ae958710d078a245878cd7d5a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52c5a9fef9792d964d1b4505a0e5857652454e088fbc8293524f644afd044507\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed406469f2fa49f8100c4c2c4adf80a173b1e5c8ab8581a4dc8c4d93fd557c7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b28b0a79fd6ef3045b5417a53b9e665faea306044a08b64362f5f573221dea8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:29Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.734768 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.734821 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.734832 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.734847 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.734858 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:29Z","lastTransitionTime":"2025-12-11T10:18:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:29 crc kubenswrapper[4663]: E1211 10:18:29.942344 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a09a2675-5943-46f1-b4c2-67ff05e820a6\\\",\\\"systemUUID\\\":\\\"18e040fd-bd85-4c98-8f53-60800a512169\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:29Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.945336 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.945396 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.945407 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.945422 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.945657 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:29Z","lastTransitionTime":"2025-12-11T10:18:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:29 crc kubenswrapper[4663]: E1211 10:18:29.955938 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a09a2675-5943-46f1-b4c2-67ff05e820a6\\\",\\\"systemUUID\\\":\\\"18e040fd-bd85-4c98-8f53-60800a512169\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:29Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.959396 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.959422 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.959443 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.959455 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.959464 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:29Z","lastTransitionTime":"2025-12-11T10:18:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:29 crc kubenswrapper[4663]: E1211 10:18:29.968528 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a09a2675-5943-46f1-b4c2-67ff05e820a6\\\",\\\"systemUUID\\\":\\\"18e040fd-bd85-4c98-8f53-60800a512169\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:29Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.971412 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.971450 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.971460 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.971473 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.971481 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:29Z","lastTransitionTime":"2025-12-11T10:18:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:29 crc kubenswrapper[4663]: E1211 10:18:29.980071 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a09a2675-5943-46f1-b4c2-67ff05e820a6\\\",\\\"systemUUID\\\":\\\"18e040fd-bd85-4c98-8f53-60800a512169\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:29Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.982956 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.982979 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.983012 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.983022 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.983029 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:29Z","lastTransitionTime":"2025-12-11T10:18:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:29 crc kubenswrapper[4663]: E1211 10:18:29.991935 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a09a2675-5943-46f1-b4c2-67ff05e820a6\\\",\\\"systemUUID\\\":\\\"18e040fd-bd85-4c98-8f53-60800a512169\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:29Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:29 crc kubenswrapper[4663]: E1211 10:18:29.992038 4663 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.993248 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.993280 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.993290 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.993301 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:29 crc kubenswrapper[4663]: I1211 10:18:29.993310 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:29Z","lastTransitionTime":"2025-12-11T10:18:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.095941 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.095971 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.095982 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.096009 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.096017 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:30Z","lastTransitionTime":"2025-12-11T10:18:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.198112 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.198166 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.198175 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.198210 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.198220 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:30Z","lastTransitionTime":"2025-12-11T10:18:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.300006 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.300038 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.300048 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.300077 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.300085 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:30Z","lastTransitionTime":"2025-12-11T10:18:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.402862 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.402904 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.402912 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.402928 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.402938 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:30Z","lastTransitionTime":"2025-12-11T10:18:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.503884 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.503920 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:18:30 crc kubenswrapper[4663]: E1211 10:18:30.504005 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 10:18:30 crc kubenswrapper[4663]: E1211 10:18:30.504324 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.504611 4663 scope.go:117] "RemoveContainer" containerID="97cb732945f473415113069fc8c737be33f8e78f7f7dc1a11ef6c7678ec3f3fd" Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.505498 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.505529 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.505549 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.505562 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.505572 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:30Z","lastTransitionTime":"2025-12-11T10:18:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.608046 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.608091 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.608100 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.608114 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.608125 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:30Z","lastTransitionTime":"2025-12-11T10:18:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.709983 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.710016 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.710024 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.710038 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.710048 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:30Z","lastTransitionTime":"2025-12-11T10:18:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.730551 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-z7882_4f739f0f-4546-4db5-809d-435823288bd8/ovnkube-controller/1.log" Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.733202 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" event={"ID":"4f739f0f-4546-4db5-809d-435823288bd8","Type":"ContainerStarted","Data":"ec884ca4a06220486834fb7dad1a97dae6026a74e1f4fa2c57ce504fd3f79a52"} Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.733317 4663 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.748297 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f739f0f-4546-4db5-809d-435823288bd8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84fd59d33b64d1211bfe2754377534ed00ca078356eeb674fcd5d2d25761500b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec884ca4a06220486834fb7dad1a97dae6026a74
e1f4fa2c57ce504fd3f79a52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97cb732945f473415113069fc8c737be33f8e78f7f7dc1a11ef6c7678ec3f3fd\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T10:18:16Z\\\",\\\"message\\\":\\\" policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:16Z is after 2025-08-24T17:21:41Z]\\\\nI1211 10:18:16.289121 6076 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-kube-controller-manager/kube-controller-manager_TCP_cluster\\\\\\\", UUID:\\\\\\\"ba175bbe-5cc4-47e6-a32d-57693e1320bd\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-kube-controller-manager/kube-controller-manager\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, 
b\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:15Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"cont
ainerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-z7882\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:30Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.757895 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"11443ad0-297e-48c9-9c1e-3771584468ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c487fbed4984533659f3e1f3a79783d12b4e95706a529dc9cd24abb62adda34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-997ts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d93647b4b0d3f4391ca6b565072c59f3f32be1433b5a043e3bb93395d529ebc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-997ts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:19Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hbks6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:30Z is after 2025-08-24T17:21:41Z" Dec 11 
10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.769593 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b934597e-ac43-4d21-82b1-c8d4260800e6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba7f98a60c12531c6766887ffac909316494552cc1acb036c5609e6400ac5b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d9bf7e6f64790700d3765c1935fa7eb13fa839a96617117bb226a8b86090fe9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83cef2539e49e0251b7cdab06b7a9002344f7ee41150f84b3f648372b4d50b55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a89c5b395c04b94c4476db7bf1edcfe175fb552dbbf4145a7031c0f86e139e9f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0068e2981a8c781e9563c1714bf0be7f35b5510f1fd7e52ab4cc83a0c0ec0c0e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"le observer\\\\nW1211 10:18:06.233120 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1211 10:18:06.233258 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 10:18:06.234798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2924789290/tls.crt::/tmp/serving-cert-2924789290/tls.key\\\\\\\"\\\\nI1211 10:18:06.570830 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 10:18:06.573023 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 10:18:06.573049 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 10:18:06.573076 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 10:18:06.573081 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 10:18:06.580393 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1211 10:18:06.580415 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580420 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580424 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 10:18:06.580426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 10:18:06.580429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 10:18:06.580432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1211 10:18:06.580522 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1211 10:18:06.581348 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1a0dcfabc032353060f8cb94fc8c601da6b20a3e2c35b84229bf66c9256537\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:30Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.779039 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9e980e59ce0e172d37122fdae971fb0123940795827553f646ae1a6283f523\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:30Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.795733 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"98941020-2764-490d-81db-9decd21aaa13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a72fbeafe047b24c15eb96a00eacf7ff119486159cd9cf32f63af409aecc529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed2238a7f3178a2c325e0f77a94934af6c907c13d0340e7f4c67b744669b56b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mq524\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:30Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.811475 4663 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-qv7jl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb85226c32cc03f4667a27f4596ce501400b323c88b347baab7574fe8f9e056c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4c2nm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-qv7jl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:30Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.812934 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.812962 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.812974 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.812990 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.813002 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:30Z","lastTransitionTime":"2025-12-11T10:18:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.824024 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5r5q6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"54c64914-8d10-48bc-a755-1f7257400a32\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8wsjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8wsjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:20Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5r5q6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:30Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.835322 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dc8119f8882cfe2688a3b38b4746f6f9f75764080053fae1f19edd7880464e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de08980167134ace33e7d6a626c4ee66e733e247e6e8d0f7e969105c12f41580\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:30Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.849459 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:30Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.857650 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8f5qw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7cf8637-c767-47eb-8d38-578340067566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3440496709e41014f785c5aadeba1bae72c4b733779039f7508fa2ca78144537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-646rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8f5qw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:30Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.867852 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:30Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.879350 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4g9zj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27da1f1a-7a92-4c81-9889-21b44ea62db0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcf1553f9e95b87867fdf90f869863968036e47ec536a526cae63020cc9119fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"container
ID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMo
unts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef7bb22e51b47e9e49687153801d9a61be88291a5c4ae94f0d6c779c1eb779c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef7bb22e51b47e9e49687153801d9a61be88291a5c4ae94f0d6c779c1eb779c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28edb9dba08d7e8cccab85d649f75ce2c946ff8be7bb2de39e02432156e2ccf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true
,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28edb9dba08d7e8cccab85d649f75ce2c946ff8be7bb2de39e02432156e2ccf8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4g9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:30Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.888117 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8lh4c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0724e8ea-646f-422e-bc21-39189bddba17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14a19c708bdceab221701a9e228b35336aa133a021ee7a088800e9bec694e175\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4cn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.1
1\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8lh4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:30Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.897683 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"82107b1a-b827-4c4a-b133-ce7e8ed73963\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf6437a10b018ecd7f7c1a23eb664377818d86ae958710d078a245878cd7d5a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52c5a9fef9792d964d1b4505a0e5857652454e088fbc8293524f644afd044507\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed406469f2fa49f8100c4c2c4adf80a173b1e5c8ab8581a4dc8c4d93fd557c7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b8279948
8ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b28b0a79fd6ef3045b5417a53b9e665faea306044a08b64362f5f573221dea8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:30Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.906517 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8fc3491f01b999c50e166f30aef0a1c9d95bc6935a4b68cdac9bfbb70bce980\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:30Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.914744 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.914778 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.914791 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.914808 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.914818 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:30Z","lastTransitionTime":"2025-12-11T10:18:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:30 crc kubenswrapper[4663]: I1211 10:18:30.914883 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:30Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.017397 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.017456 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.017467 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.017502 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.017516 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:31Z","lastTransitionTime":"2025-12-11T10:18:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration 
file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.119532 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.119566 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.119578 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.119593 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.119601 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:31Z","lastTransitionTime":"2025-12-11T10:18:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.222456 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.222499 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.222509 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.222522 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.222532 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:31Z","lastTransitionTime":"2025-12-11T10:18:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.324964 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.324999 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.325007 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.325019 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.325027 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:31Z","lastTransitionTime":"2025-12-11T10:18:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.426927 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.426965 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.426975 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.426988 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.426999 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:31Z","lastTransitionTime":"2025-12-11T10:18:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.504730 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.504748 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:18:31 crc kubenswrapper[4663]: E1211 10:18:31.505004 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-5r5q6" podUID="54c64914-8d10-48bc-a755-1f7257400a32" Dec 11 10:18:31 crc kubenswrapper[4663]: E1211 10:18:31.504890 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.528869 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.528907 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.528916 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.528927 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.528938 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:31Z","lastTransitionTime":"2025-12-11T10:18:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.631301 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.631339 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.631348 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.631373 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.631382 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:31Z","lastTransitionTime":"2025-12-11T10:18:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.734168 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.734233 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.734244 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.734255 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.734266 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:31Z","lastTransitionTime":"2025-12-11T10:18:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.738027 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-z7882_4f739f0f-4546-4db5-809d-435823288bd8/ovnkube-controller/2.log" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.738687 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-z7882_4f739f0f-4546-4db5-809d-435823288bd8/ovnkube-controller/1.log" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.741515 4663 generic.go:334] "Generic (PLEG): container finished" podID="4f739f0f-4546-4db5-809d-435823288bd8" containerID="ec884ca4a06220486834fb7dad1a97dae6026a74e1f4fa2c57ce504fd3f79a52" exitCode=1 Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.741615 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" event={"ID":"4f739f0f-4546-4db5-809d-435823288bd8","Type":"ContainerDied","Data":"ec884ca4a06220486834fb7dad1a97dae6026a74e1f4fa2c57ce504fd3f79a52"} Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.741729 4663 scope.go:117] "RemoveContainer" containerID="97cb732945f473415113069fc8c737be33f8e78f7f7dc1a11ef6c7678ec3f3fd" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.743677 4663 scope.go:117] "RemoveContainer" containerID="ec884ca4a06220486834fb7dad1a97dae6026a74e1f4fa2c57ce504fd3f79a52" Dec 11 10:18:31 crc kubenswrapper[4663]: E1211 10:18:31.743962 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-z7882_openshift-ovn-kubernetes(4f739f0f-4546-4db5-809d-435823288bd8)\"" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" podUID="4f739f0f-4546-4db5-809d-435823288bd8" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.756053 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b934597e-ac43-4d21-82b1-c8d4260800e6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba7f98a60c12531c6766887ffac909316494552cc1acb036c5609e6400ac5b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d9bf7e6f64790700d3765c1935fa7eb13fa839a96617117bb226a8b86090fe9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83cef2539e49e0251b7cdab06b7a9002344f7ee41150f84b3f648372b4d50b55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a89c5b395c04b94c4476db7bf1edcfe175fb552dbbf4145a7031c0f86e139e9f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0068e2981a8c781e9563c1714bf0be7f35b5510f1fd7e52ab4cc83a0c0ec0c0e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"le observer\\\\nW1211 10:18:06.233120 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1211 10:18:06.233258 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 10:18:06.234798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2924789290/tls.crt::/tmp/serving-cert-2924789290/tls.key\\\\\\\"\\\\nI1211 10:18:06.570830 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 10:18:06.573023 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 10:18:06.573049 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 10:18:06.573076 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 10:18:06.573081 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 10:18:06.580393 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1211 10:18:06.580415 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580420 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580424 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 10:18:06.580426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 10:18:06.580429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 10:18:06.580432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1211 10:18:06.580522 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1211 10:18:06.581348 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1a0dcfabc032353060f8cb94fc8c601da6b20a3e2c35b84229bf66c9256537\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:31Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.766955 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9e980e59ce0e172d37122fdae971fb0123940795827553f646ae1a6283f523\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:31Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.781199 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f739f0f-4546-4db5-809d-435823288bd8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84fd59d33b64d1211bfe2754377534ed00ca078356eeb674fcd5d2d25761500b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec884ca4a06220486834fb7dad1a97dae6026a74
e1f4fa2c57ce504fd3f79a52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97cb732945f473415113069fc8c737be33f8e78f7f7dc1a11ef6c7678ec3f3fd\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T10:18:16Z\\\",\\\"message\\\":\\\" policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:16Z is after 2025-08-24T17:21:41Z]\\\\nI1211 10:18:16.289121 6076 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-kube-controller-manager/kube-controller-manager_TCP_cluster\\\\\\\", UUID:\\\\\\\"ba175bbe-5cc4-47e6-a32d-57693e1320bd\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-kube-controller-manager/kube-controller-manager\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, b\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:15Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec884ca4a06220486834fb7dad1a97dae6026a74e1f4fa2c57ce504fd3f79a52\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T10:18:31Z\\\",\\\"message\\\":\\\"11 10:18:31.160898 6304 obj_retry.go:303] Retry object setup: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6\\\\nI1211 10:18:31.160908 6304 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6\\\\nF1211 10:18:31.160914 6304 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet 
valid: current time 2025-12-11T10:18:31Z is after 2025-08-24T17:21:41Z]\\\\nI1211 10:18:31.160923 6304 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1211 10:18:31.160928 6304 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],
\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-z7882\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:31Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.791916 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"11443ad0-297e-48c9-9c1e-3771584468ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c487fbed4984533659f3e1f3a79783d12b4e95706a529dc9cd24abb62adda34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-997ts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d93647b4b0d3f4391ca6b565072c59f3f32be1433b5a043e3bb93395d529ebc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-997ts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:19Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hbks6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:31Z is after 2025-08-24T17:21:41Z" Dec 11 
10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.800703 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:31Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.807790 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8f5qw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7cf8637-c767-47eb-8d38-578340067566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3440496709e41014f785c5aadeba1bae72c4b733779039f7508fa2ca78144537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-646rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8f5qw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:31Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.815510 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"98941020-2764-490d-81db-9decd21aaa13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a72fbeafe047b24c15eb96a00eacf7ff119486159cd9cf32f63af409aecc529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed2238a7f3178a2c325e0f77a94934af6c907c13d0340e7f4c67b744669b56b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mq524\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:31Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.823848 4663 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-qv7jl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb85226c32cc03f4667a27f4596ce501400b323c88b347baab7574fe8f9e056c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4c2nm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-qv7jl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:31Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.831261 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5r5q6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"54c64914-8d10-48bc-a755-1f7257400a32\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8wsjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8wsjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:20Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5r5q6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: 
current time 2025-12-11T10:18:31Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.836729 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.836756 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.836765 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.836780 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.836791 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:31Z","lastTransitionTime":"2025-12-11T10:18:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.847711 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dc8119f8882cfe2688a3b38b4746f6f9f75764080053fae1f19edd7880464e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de08980167134ace33e7d6a626c4ee66e733e247e6e8d0f7e969105c12f41580\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\
":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:31Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.857021 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8fc3491f01b999c50e166f30aef0a1c9d95bc6935a4b68cdac9bfbb70bce980\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:31Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.866453 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:31Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.875105 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:31Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.889165 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4g9zj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27da1f1a-7a92-4c81-9889-21b44ea62db0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcf1553f9e95b87867fdf90f869863968036e47ec536a526cae63020cc9119fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"container
ID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMo
unts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef7bb22e51b47e9e49687153801d9a61be88291a5c4ae94f0d6c779c1eb779c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef7bb22e51b47e9e49687153801d9a61be88291a5c4ae94f0d6c779c1eb779c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28edb9dba08d7e8cccab85d649f75ce2c946ff8be7bb2de39e02432156e2ccf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true
,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28edb9dba08d7e8cccab85d649f75ce2c946ff8be7bb2de39e02432156e2ccf8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4g9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:31Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.897219 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8lh4c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0724e8ea-646f-422e-bc21-39189bddba17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14a19c708bdceab221701a9e228b35336aa133a021ee7a088800e9bec694e175\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4cn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.1
1\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8lh4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:31Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.905648 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"82107b1a-b827-4c4a-b133-ce7e8ed73963\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf6437a10b018ecd7f7c1a23eb664377818d86ae958710d078a245878cd7d5a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52c5a9fef9792d964d1b4505a0e5857652454e088fbc8293524f644afd044507\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed406469f2fa49f8100c4c2c4adf80a173b1e5c8ab8581a4dc8c4d93fd557c7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b8279948
8ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b28b0a79fd6ef3045b5417a53b9e665faea306044a08b64362f5f573221dea8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:31Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.939643 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.939680 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.939690 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.939707 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:31 crc kubenswrapper[4663]: I1211 10:18:31.939718 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:31Z","lastTransitionTime":"2025-12-11T10:18:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.042017 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.042052 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.042061 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.042074 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.042083 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:32Z","lastTransitionTime":"2025-12-11T10:18:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.144905 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.144973 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.144987 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.145010 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.145024 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:32Z","lastTransitionTime":"2025-12-11T10:18:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.247365 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.247417 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.247426 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.247443 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.247455 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:32Z","lastTransitionTime":"2025-12-11T10:18:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.307973 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-z7882"
Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.349062 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.349109 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.349122 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.349136 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.349145 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:32Z","lastTransitionTime":"2025-12-11T10:18:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.452029 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.452085 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.452095 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.452109 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.452118 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:32Z","lastTransitionTime":"2025-12-11T10:18:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.504250 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 11 10:18:32 crc kubenswrapper[4663]: E1211 10:18:32.504401 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.504272 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 11 10:18:32 crc kubenswrapper[4663]: E1211 10:18:32.504633 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.554854 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.554885 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.554893 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.554907 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.554917 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:32Z","lastTransitionTime":"2025-12-11T10:18:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.656722 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.656765 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.656773 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.656785 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.656795 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:32Z","lastTransitionTime":"2025-12-11T10:18:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.746493 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-z7882_4f739f0f-4546-4db5-809d-435823288bd8/ovnkube-controller/2.log"
Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.750062 4663 scope.go:117] "RemoveContainer" containerID="ec884ca4a06220486834fb7dad1a97dae6026a74e1f4fa2c57ce504fd3f79a52"
Dec 11 10:18:32 crc kubenswrapper[4663]: E1211 10:18:32.750242 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-z7882_openshift-ovn-kubernetes(4f739f0f-4546-4db5-809d-435823288bd8)\"" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" podUID="4f739f0f-4546-4db5-809d-435823288bd8"
Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.758046 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.758073 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.758082 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.758095 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.758106 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:32Z","lastTransitionTime":"2025-12-11T10:18:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.764558 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11443ad0-297e-48c9-9c1e-3771584468ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c487fbed4984533659f3e1f3a79783d12b4e95706a529dc9cd24abb62adda34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-997ts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d93647b4b0d3f4391ca6b565072c59f3f32be1433b5a043e3bb93395d529ebc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-997ts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:19Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hbks6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:32Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.777263 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b934597e-ac43-4d21-82b1-c8d4260800e6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba7f98a60c12531c6766887ffac909316494552cc1acb036c5609e6400ac5b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d9bf7e6f64790700d3765c1935fa7eb13fa839a96617117bb226a8b86090fe9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83cef2539e49e0251b7cdab06b7a9002344f7ee41150f84b3f648372b4d50b55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a89c5b395c04b94c4476db7bf1edcfe175fb552dbbf4145a7031c0f86e139e9f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0068e2981a8c781e9563c1714bf0be7f35b5510f1fd7e52ab4cc83a0c0ec0c0e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"le observer\\\\nW1211 10:18:06.233120 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1211 10:18:06.233258 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 10:18:06.234798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2924789290/tls.crt::/tmp/serving-cert-2924789290/tls.key\\\\\\\"\\\\nI1211 10:18:06.570830 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 10:18:06.573023 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 10:18:06.573049 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 10:18:06.573076 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 10:18:06.573081 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 10:18:06.580393 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1211 10:18:06.580415 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580420 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580424 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 10:18:06.580426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 10:18:06.580429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 10:18:06.580432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1211 10:18:06.580522 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1211 10:18:06.581348 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1a0dcfabc032353060f8cb94fc8c601da6b20a3e2c35b84229bf66c9256537\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:32Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.787840 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9e980e59ce0e172d37122fdae971fb0123940795827553f646ae1a6283f523\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:32Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.802978 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f739f0f-4546-4db5-809d-435823288bd8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84fd59d33b64d1211bfe2754377534ed00ca078356eeb674fcd5d2d25761500b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec884ca4a06220486834fb7dad1a97dae6026a74
e1f4fa2c57ce504fd3f79a52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec884ca4a06220486834fb7dad1a97dae6026a74e1f4fa2c57ce504fd3f79a52\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T10:18:31Z\\\",\\\"message\\\":\\\"11 10:18:31.160898 6304 obj_retry.go:303] Retry object setup: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6\\\\nI1211 10:18:31.160908 6304 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6\\\\nF1211 10:18:31.160914 6304 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:31Z is after 2025-08-24T17:21:41Z]\\\\nI1211 10:18:31.160923 6304 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1211 10:18:31.160928 6304 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-z7882_openshift-ovn-kubernetes(4f739f0f-4546-4db5-809d-435823288bd8)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-z7882\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:32Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.814447 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-qv7jl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb85226c32cc03f4667a27f4596ce501400b323c88b347baab7574fe8f9e056c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-
cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4c2nm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-qv7jl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:32Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.822882 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5r5q6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"54c64914-8d10-48bc-a755-1f7257400a32\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8wsjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8wsjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:20Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5r5q6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:32Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.833213 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dc8119f8882cfe2688a3b38b4746f6f9f75764080053fae1f19edd7880464e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de08980167134ace33e7d6a626c4ee66e733e247e6e8d0f7e969105c12f41580\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:32Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.856407 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:32Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.859751 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.859792 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.859800 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.859814 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.859822 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:32Z","lastTransitionTime":"2025-12-11T10:18:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.867440 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8f5qw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7cf8637-c767-47eb-8d38-578340067566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3440496709e41014f785c5aadeba1bae72c4b733779039f7508fa2ca78144537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-646rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8f5qw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:32Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.885322 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"98941020-2764-490d-81db-9decd21aaa13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a72fbeafe047b24c15eb96a00eacf7ff119486159cd9cf32f63af409aecc529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed2238a7f3178a2c325e0f77a94934af6c907c13d0340e7f4c67b744669b56b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mq524\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:32Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.898119 4663 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4g9zj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27da1f1a-7a92-4c81-9889-21b44ea62db0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcf1553f9e95b87867fdf90f869863968036e47ec536a526cae63020cc9119fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef7bb22e51b47e9e49687153801d9a61be88291a5c4ae94f0d6c779c1eb779c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef7bb22e51b47e9e49687153801d9a61be88291a5c4ae94f0d6c779c1eb779c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28edb9dba08d7e8cccab85d649f75ce2c946ff8be7bb2de39e02432156e2ccf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28edb9dba08d7e8cccab85d649f75ce2c946ff8be7bb2de39e02432156e2ccf8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4g9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:32Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.907450 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8lh4c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0724e8ea-646f-422e-bc21-39189bddba17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14a19c708bdceab221701a9e228b35336aa133a021ee7a088800e9bec694e175\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4cn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8lh4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:32Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.917017 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"82107b1a-b827-4c4a-b133-ce7e8ed73963\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf6437a10b018ecd7f7c1a23eb664377818d86ae958710d078a245878cd7d5a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52c5a9fef9792d964d1b4505a0e5857652454e088fbc8293524f644afd044507\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed406469f2fa49f8100c4c2c4adf80a173b1e5c8ab8581a4dc8c4d93fd557c7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b28b0a79fd6ef3045b5417a53b9e665faea306044a08b64362f5f573221dea8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:32Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.928255 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8fc3491f01b999c50e166f30aef0a1c9d95bc6935a4b68cdac9bfbb70bce980\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:32Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.938420 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:32Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.948781 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:32Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.962563 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.962598 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.962607 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.962620 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:32 crc kubenswrapper[4663]: I1211 10:18:32.962630 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:32Z","lastTransitionTime":"2025-12-11T10:18:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:33 crc kubenswrapper[4663]: I1211 10:18:33.064851 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:33 crc kubenswrapper[4663]: I1211 10:18:33.064889 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:33 crc kubenswrapper[4663]: I1211 10:18:33.064898 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:33 crc kubenswrapper[4663]: I1211 10:18:33.064911 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:33 crc kubenswrapper[4663]: I1211 10:18:33.064920 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:33Z","lastTransitionTime":"2025-12-11T10:18:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:33 crc kubenswrapper[4663]: I1211 10:18:33.167614 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:33 crc kubenswrapper[4663]: I1211 10:18:33.167659 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:33 crc kubenswrapper[4663]: I1211 10:18:33.167669 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:33 crc kubenswrapper[4663]: I1211 10:18:33.167683 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:33 crc kubenswrapper[4663]: I1211 10:18:33.167693 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:33Z","lastTransitionTime":"2025-12-11T10:18:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:33 crc kubenswrapper[4663]: I1211 10:18:33.269755 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:33 crc kubenswrapper[4663]: I1211 10:18:33.269796 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:33 crc kubenswrapper[4663]: I1211 10:18:33.269804 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:33 crc kubenswrapper[4663]: I1211 10:18:33.269817 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:33 crc kubenswrapper[4663]: I1211 10:18:33.269826 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:33Z","lastTransitionTime":"2025-12-11T10:18:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:33 crc kubenswrapper[4663]: I1211 10:18:33.372833 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:33 crc kubenswrapper[4663]: I1211 10:18:33.372876 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:33 crc kubenswrapper[4663]: I1211 10:18:33.372884 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:33 crc kubenswrapper[4663]: I1211 10:18:33.372897 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:33 crc kubenswrapper[4663]: I1211 10:18:33.372906 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:33Z","lastTransitionTime":"2025-12-11T10:18:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:33 crc kubenswrapper[4663]: I1211 10:18:33.475066 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:33 crc kubenswrapper[4663]: I1211 10:18:33.475105 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:33 crc kubenswrapper[4663]: I1211 10:18:33.475114 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:33 crc kubenswrapper[4663]: I1211 10:18:33.475125 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:33 crc kubenswrapper[4663]: I1211 10:18:33.475134 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:33Z","lastTransitionTime":"2025-12-11T10:18:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:33 crc kubenswrapper[4663]: I1211 10:18:33.503871 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:18:33 crc kubenswrapper[4663]: E1211 10:18:33.503997 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 10:18:33 crc kubenswrapper[4663]: I1211 10:18:33.504143 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:18:33 crc kubenswrapper[4663]: E1211 10:18:33.504360 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5r5q6" podUID="54c64914-8d10-48bc-a755-1f7257400a32" Dec 11 10:18:33 crc kubenswrapper[4663]: I1211 10:18:33.577823 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:33 crc kubenswrapper[4663]: I1211 10:18:33.577862 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:33 crc kubenswrapper[4663]: I1211 10:18:33.577874 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:33 crc kubenswrapper[4663]: I1211 10:18:33.577886 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:33 crc kubenswrapper[4663]: I1211 10:18:33.577894 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:33Z","lastTransitionTime":"2025-12-11T10:18:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:33 crc kubenswrapper[4663]: I1211 10:18:33.679833 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:33 crc kubenswrapper[4663]: I1211 10:18:33.679877 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:33 crc kubenswrapper[4663]: I1211 10:18:33.679886 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:33 crc kubenswrapper[4663]: I1211 10:18:33.679899 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:33 crc kubenswrapper[4663]: I1211 10:18:33.679908 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:33Z","lastTransitionTime":"2025-12-11T10:18:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:33 crc kubenswrapper[4663]: I1211 10:18:33.782515 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:33 crc kubenswrapper[4663]: I1211 10:18:33.782559 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:33 crc kubenswrapper[4663]: I1211 10:18:33.782569 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:33 crc kubenswrapper[4663]: I1211 10:18:33.782582 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:33 crc kubenswrapper[4663]: I1211 10:18:33.782595 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:33Z","lastTransitionTime":"2025-12-11T10:18:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:33 crc kubenswrapper[4663]: I1211 10:18:33.885068 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:33 crc kubenswrapper[4663]: I1211 10:18:33.885101 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:33 crc kubenswrapper[4663]: I1211 10:18:33.885111 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:33 crc kubenswrapper[4663]: I1211 10:18:33.885123 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:33 crc kubenswrapper[4663]: I1211 10:18:33.885131 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:33Z","lastTransitionTime":"2025-12-11T10:18:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:33 crc kubenswrapper[4663]: I1211 10:18:33.987549 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:33 crc kubenswrapper[4663]: I1211 10:18:33.987597 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:33 crc kubenswrapper[4663]: I1211 10:18:33.987607 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:33 crc kubenswrapper[4663]: I1211 10:18:33.987619 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:33 crc kubenswrapper[4663]: I1211 10:18:33.987627 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:33Z","lastTransitionTime":"2025-12-11T10:18:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:34 crc kubenswrapper[4663]: I1211 10:18:34.089622 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:34 crc kubenswrapper[4663]: I1211 10:18:34.089665 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:34 crc kubenswrapper[4663]: I1211 10:18:34.089674 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:34 crc kubenswrapper[4663]: I1211 10:18:34.089689 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:34 crc kubenswrapper[4663]: I1211 10:18:34.089698 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:34Z","lastTransitionTime":"2025-12-11T10:18:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:34 crc kubenswrapper[4663]: I1211 10:18:34.192652 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:34 crc kubenswrapper[4663]: I1211 10:18:34.192709 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:34 crc kubenswrapper[4663]: I1211 10:18:34.192717 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:34 crc kubenswrapper[4663]: I1211 10:18:34.192743 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:34 crc kubenswrapper[4663]: I1211 10:18:34.192754 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:34Z","lastTransitionTime":"2025-12-11T10:18:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:34 crc kubenswrapper[4663]: I1211 10:18:34.295155 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:34 crc kubenswrapper[4663]: I1211 10:18:34.295210 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:34 crc kubenswrapper[4663]: I1211 10:18:34.295218 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:34 crc kubenswrapper[4663]: I1211 10:18:34.295242 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:34 crc kubenswrapper[4663]: I1211 10:18:34.295255 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:34Z","lastTransitionTime":"2025-12-11T10:18:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:34 crc kubenswrapper[4663]: I1211 10:18:34.397628 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:34 crc kubenswrapper[4663]: I1211 10:18:34.397665 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:34 crc kubenswrapper[4663]: I1211 10:18:34.397675 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:34 crc kubenswrapper[4663]: I1211 10:18:34.397687 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:34 crc kubenswrapper[4663]: I1211 10:18:34.397695 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:34Z","lastTransitionTime":"2025-12-11T10:18:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:34 crc kubenswrapper[4663]: I1211 10:18:34.499730 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:34 crc kubenswrapper[4663]: I1211 10:18:34.499774 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:34 crc kubenswrapper[4663]: I1211 10:18:34.499784 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:34 crc kubenswrapper[4663]: I1211 10:18:34.499797 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:34 crc kubenswrapper[4663]: I1211 10:18:34.499806 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:34Z","lastTransitionTime":"2025-12-11T10:18:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:34 crc kubenswrapper[4663]: I1211 10:18:34.504165 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:18:34 crc kubenswrapper[4663]: E1211 10:18:34.504298 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 10:18:34 crc kubenswrapper[4663]: I1211 10:18:34.504172 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:18:34 crc kubenswrapper[4663]: E1211 10:18:34.504431 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 10:18:34 crc kubenswrapper[4663]: I1211 10:18:34.601464 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:34 crc kubenswrapper[4663]: I1211 10:18:34.601507 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:34 crc kubenswrapper[4663]: I1211 10:18:34.601517 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:34 crc kubenswrapper[4663]: I1211 10:18:34.601532 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:34 crc kubenswrapper[4663]: I1211 10:18:34.601543 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:34Z","lastTransitionTime":"2025-12-11T10:18:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:34 crc kubenswrapper[4663]: I1211 10:18:34.703287 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:34 crc kubenswrapper[4663]: I1211 10:18:34.703332 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:34 crc kubenswrapper[4663]: I1211 10:18:34.703342 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:34 crc kubenswrapper[4663]: I1211 10:18:34.703356 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:34 crc kubenswrapper[4663]: I1211 10:18:34.703366 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:34Z","lastTransitionTime":"2025-12-11T10:18:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:34 crc kubenswrapper[4663]: I1211 10:18:34.804818 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:34 crc kubenswrapper[4663]: I1211 10:18:34.804863 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:34 crc kubenswrapper[4663]: I1211 10:18:34.804871 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:34 crc kubenswrapper[4663]: I1211 10:18:34.804889 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:34 crc kubenswrapper[4663]: I1211 10:18:34.804900 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:34Z","lastTransitionTime":"2025-12-11T10:18:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:34 crc kubenswrapper[4663]: I1211 10:18:34.906467 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:34 crc kubenswrapper[4663]: I1211 10:18:34.906506 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:34 crc kubenswrapper[4663]: I1211 10:18:34.906515 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:34 crc kubenswrapper[4663]: I1211 10:18:34.906526 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:34 crc kubenswrapper[4663]: I1211 10:18:34.906534 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:34Z","lastTransitionTime":"2025-12-11T10:18:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:35 crc kubenswrapper[4663]: I1211 10:18:35.008008 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:35 crc kubenswrapper[4663]: I1211 10:18:35.008037 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:35 crc kubenswrapper[4663]: I1211 10:18:35.008045 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:35 crc kubenswrapper[4663]: I1211 10:18:35.008057 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:35 crc kubenswrapper[4663]: I1211 10:18:35.008067 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:35Z","lastTransitionTime":"2025-12-11T10:18:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:35 crc kubenswrapper[4663]: I1211 10:18:35.110110 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:35 crc kubenswrapper[4663]: I1211 10:18:35.110158 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:35 crc kubenswrapper[4663]: I1211 10:18:35.110167 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:35 crc kubenswrapper[4663]: I1211 10:18:35.110219 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:35 crc kubenswrapper[4663]: I1211 10:18:35.110232 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:35Z","lastTransitionTime":"2025-12-11T10:18:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:35 crc kubenswrapper[4663]: I1211 10:18:35.212386 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:35 crc kubenswrapper[4663]: I1211 10:18:35.212422 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:35 crc kubenswrapper[4663]: I1211 10:18:35.212431 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:35 crc kubenswrapper[4663]: I1211 10:18:35.212445 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:35 crc kubenswrapper[4663]: I1211 10:18:35.212454 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:35Z","lastTransitionTime":"2025-12-11T10:18:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:35 crc kubenswrapper[4663]: I1211 10:18:35.314332 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:35 crc kubenswrapper[4663]: I1211 10:18:35.314377 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:35 crc kubenswrapper[4663]: I1211 10:18:35.314386 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:35 crc kubenswrapper[4663]: I1211 10:18:35.314399 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:35 crc kubenswrapper[4663]: I1211 10:18:35.314410 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:35Z","lastTransitionTime":"2025-12-11T10:18:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:35 crc kubenswrapper[4663]: I1211 10:18:35.417083 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:35 crc kubenswrapper[4663]: I1211 10:18:35.417125 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:35 crc kubenswrapper[4663]: I1211 10:18:35.417133 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:35 crc kubenswrapper[4663]: I1211 10:18:35.417149 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:35 crc kubenswrapper[4663]: I1211 10:18:35.417159 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:35Z","lastTransitionTime":"2025-12-11T10:18:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:35 crc kubenswrapper[4663]: I1211 10:18:35.504227 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:18:35 crc kubenswrapper[4663]: I1211 10:18:35.504227 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:18:35 crc kubenswrapper[4663]: E1211 10:18:35.504438 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 10:18:35 crc kubenswrapper[4663]: E1211 10:18:35.504361 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-5r5q6" podUID="54c64914-8d10-48bc-a755-1f7257400a32" Dec 11 10:18:35 crc kubenswrapper[4663]: I1211 10:18:35.519036 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:35 crc kubenswrapper[4663]: I1211 10:18:35.519064 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:35 crc kubenswrapper[4663]: I1211 10:18:35.519072 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:35 crc kubenswrapper[4663]: I1211 10:18:35.519082 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:35 crc kubenswrapper[4663]: I1211 10:18:35.519091 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:35Z","lastTransitionTime":"2025-12-11T10:18:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:35 crc kubenswrapper[4663]: I1211 10:18:35.622040 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:35 crc kubenswrapper[4663]: I1211 10:18:35.622103 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:35 crc kubenswrapper[4663]: I1211 10:18:35.622113 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:35 crc kubenswrapper[4663]: I1211 10:18:35.622126 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:35 crc kubenswrapper[4663]: I1211 10:18:35.622136 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:35Z","lastTransitionTime":"2025-12-11T10:18:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:35 crc kubenswrapper[4663]: I1211 10:18:35.724953 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:35 crc kubenswrapper[4663]: I1211 10:18:35.724995 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:35 crc kubenswrapper[4663]: I1211 10:18:35.725006 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:35 crc kubenswrapper[4663]: I1211 10:18:35.725018 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:35 crc kubenswrapper[4663]: I1211 10:18:35.725027 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:35Z","lastTransitionTime":"2025-12-11T10:18:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:35 crc kubenswrapper[4663]: I1211 10:18:35.827477 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:35 crc kubenswrapper[4663]: I1211 10:18:35.827516 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:35 crc kubenswrapper[4663]: I1211 10:18:35.827545 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:35 crc kubenswrapper[4663]: I1211 10:18:35.827559 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:35 crc kubenswrapper[4663]: I1211 10:18:35.827568 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:35Z","lastTransitionTime":"2025-12-11T10:18:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:35 crc kubenswrapper[4663]: I1211 10:18:35.930396 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:35 crc kubenswrapper[4663]: I1211 10:18:35.930438 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:35 crc kubenswrapper[4663]: I1211 10:18:35.930446 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:35 crc kubenswrapper[4663]: I1211 10:18:35.930460 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:35 crc kubenswrapper[4663]: I1211 10:18:35.930468 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:35Z","lastTransitionTime":"2025-12-11T10:18:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:36 crc kubenswrapper[4663]: I1211 10:18:36.032623 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:36 crc kubenswrapper[4663]: I1211 10:18:36.032669 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:36 crc kubenswrapper[4663]: I1211 10:18:36.032678 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:36 crc kubenswrapper[4663]: I1211 10:18:36.032692 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:36 crc kubenswrapper[4663]: I1211 10:18:36.032702 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:36Z","lastTransitionTime":"2025-12-11T10:18:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:36 crc kubenswrapper[4663]: I1211 10:18:36.134737 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:36 crc kubenswrapper[4663]: I1211 10:18:36.134773 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:36 crc kubenswrapper[4663]: I1211 10:18:36.134782 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:36 crc kubenswrapper[4663]: I1211 10:18:36.134793 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:36 crc kubenswrapper[4663]: I1211 10:18:36.134801 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:36Z","lastTransitionTime":"2025-12-11T10:18:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:36 crc kubenswrapper[4663]: I1211 10:18:36.236979 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:36 crc kubenswrapper[4663]: I1211 10:18:36.237015 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:36 crc kubenswrapper[4663]: I1211 10:18:36.237041 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:36 crc kubenswrapper[4663]: I1211 10:18:36.237055 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:36 crc kubenswrapper[4663]: I1211 10:18:36.237066 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:36Z","lastTransitionTime":"2025-12-11T10:18:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:36 crc kubenswrapper[4663]: I1211 10:18:36.264751 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/54c64914-8d10-48bc-a755-1f7257400a32-metrics-certs\") pod \"network-metrics-daemon-5r5q6\" (UID: \"54c64914-8d10-48bc-a755-1f7257400a32\") " pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:18:36 crc kubenswrapper[4663]: E1211 10:18:36.264862 4663 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 11 10:18:36 crc kubenswrapper[4663]: E1211 10:18:36.264931 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/54c64914-8d10-48bc-a755-1f7257400a32-metrics-certs podName:54c64914-8d10-48bc-a755-1f7257400a32 nodeName:}" failed. No retries permitted until 2025-12-11 10:18:52.26491357 +0000 UTC m=+63.082843364 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/54c64914-8d10-48bc-a755-1f7257400a32-metrics-certs") pod "network-metrics-daemon-5r5q6" (UID: "54c64914-8d10-48bc-a755-1f7257400a32") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 11 10:18:36 crc kubenswrapper[4663]: I1211 10:18:36.339496 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:36 crc kubenswrapper[4663]: I1211 10:18:36.339531 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:36 crc kubenswrapper[4663]: I1211 10:18:36.339543 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:36 crc kubenswrapper[4663]: I1211 10:18:36.339556 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:36 crc kubenswrapper[4663]: I1211 10:18:36.339566 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:36Z","lastTransitionTime":"2025-12-11T10:18:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:36 crc kubenswrapper[4663]: I1211 10:18:36.441382 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:36 crc kubenswrapper[4663]: I1211 10:18:36.441420 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:36 crc kubenswrapper[4663]: I1211 10:18:36.441429 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:36 crc kubenswrapper[4663]: I1211 10:18:36.441441 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:36 crc kubenswrapper[4663]: I1211 10:18:36.441451 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:36Z","lastTransitionTime":"2025-12-11T10:18:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:36 crc kubenswrapper[4663]: I1211 10:18:36.504267 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:18:36 crc kubenswrapper[4663]: I1211 10:18:36.504327 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:18:36 crc kubenswrapper[4663]: E1211 10:18:36.504366 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 10:18:36 crc kubenswrapper[4663]: E1211 10:18:36.504397 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 10:18:36 crc kubenswrapper[4663]: I1211 10:18:36.543758 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:36 crc kubenswrapper[4663]: I1211 10:18:36.543819 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:36 crc kubenswrapper[4663]: I1211 10:18:36.543829 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:36 crc kubenswrapper[4663]: I1211 10:18:36.543845 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:36 crc kubenswrapper[4663]: I1211 10:18:36.543854 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:36Z","lastTransitionTime":"2025-12-11T10:18:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:36 crc kubenswrapper[4663]: I1211 10:18:36.645836 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:36 crc kubenswrapper[4663]: I1211 10:18:36.645871 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:36 crc kubenswrapper[4663]: I1211 10:18:36.645882 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:36 crc kubenswrapper[4663]: I1211 10:18:36.645894 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:36 crc kubenswrapper[4663]: I1211 10:18:36.645904 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:36Z","lastTransitionTime":"2025-12-11T10:18:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:36 crc kubenswrapper[4663]: I1211 10:18:36.747909 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:36 crc kubenswrapper[4663]: I1211 10:18:36.747972 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:36 crc kubenswrapper[4663]: I1211 10:18:36.747985 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:36 crc kubenswrapper[4663]: I1211 10:18:36.747999 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:36 crc kubenswrapper[4663]: I1211 10:18:36.748011 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:36Z","lastTransitionTime":"2025-12-11T10:18:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:36 crc kubenswrapper[4663]: I1211 10:18:36.850512 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:36 crc kubenswrapper[4663]: I1211 10:18:36.850549 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:36 crc kubenswrapper[4663]: I1211 10:18:36.850558 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:36 crc kubenswrapper[4663]: I1211 10:18:36.850571 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:36 crc kubenswrapper[4663]: I1211 10:18:36.850579 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:36Z","lastTransitionTime":"2025-12-11T10:18:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:36 crc kubenswrapper[4663]: I1211 10:18:36.953126 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:36 crc kubenswrapper[4663]: I1211 10:18:36.953213 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:36 crc kubenswrapper[4663]: I1211 10:18:36.953224 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:36 crc kubenswrapper[4663]: I1211 10:18:36.953235 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:36 crc kubenswrapper[4663]: I1211 10:18:36.953245 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:36Z","lastTransitionTime":"2025-12-11T10:18:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:37 crc kubenswrapper[4663]: I1211 10:18:37.055299 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:37 crc kubenswrapper[4663]: I1211 10:18:37.055340 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:37 crc kubenswrapper[4663]: I1211 10:18:37.055348 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:37 crc kubenswrapper[4663]: I1211 10:18:37.055361 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:37 crc kubenswrapper[4663]: I1211 10:18:37.055373 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:37Z","lastTransitionTime":"2025-12-11T10:18:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:37 crc kubenswrapper[4663]: I1211 10:18:37.157528 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:37 crc kubenswrapper[4663]: I1211 10:18:37.157581 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:37 crc kubenswrapper[4663]: I1211 10:18:37.157590 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:37 crc kubenswrapper[4663]: I1211 10:18:37.157603 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:37 crc kubenswrapper[4663]: I1211 10:18:37.157616 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:37Z","lastTransitionTime":"2025-12-11T10:18:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:37 crc kubenswrapper[4663]: I1211 10:18:37.259404 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:37 crc kubenswrapper[4663]: I1211 10:18:37.259438 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:37 crc kubenswrapper[4663]: I1211 10:18:37.259447 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:37 crc kubenswrapper[4663]: I1211 10:18:37.259458 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:37 crc kubenswrapper[4663]: I1211 10:18:37.259465 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:37Z","lastTransitionTime":"2025-12-11T10:18:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:37 crc kubenswrapper[4663]: I1211 10:18:37.361340 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:37 crc kubenswrapper[4663]: I1211 10:18:37.361380 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:37 crc kubenswrapper[4663]: I1211 10:18:37.361389 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:37 crc kubenswrapper[4663]: I1211 10:18:37.361401 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:37 crc kubenswrapper[4663]: I1211 10:18:37.361409 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:37Z","lastTransitionTime":"2025-12-11T10:18:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:37 crc kubenswrapper[4663]: I1211 10:18:37.462948 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:37 crc kubenswrapper[4663]: I1211 10:18:37.462992 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:37 crc kubenswrapper[4663]: I1211 10:18:37.463002 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:37 crc kubenswrapper[4663]: I1211 10:18:37.463018 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:37 crc kubenswrapper[4663]: I1211 10:18:37.463038 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:37Z","lastTransitionTime":"2025-12-11T10:18:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:37 crc kubenswrapper[4663]: I1211 10:18:37.504549 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:18:37 crc kubenswrapper[4663]: I1211 10:18:37.504677 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:18:37 crc kubenswrapper[4663]: E1211 10:18:37.504884 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 10:18:37 crc kubenswrapper[4663]: E1211 10:18:37.504957 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5r5q6" podUID="54c64914-8d10-48bc-a755-1f7257400a32" Dec 11 10:18:37 crc kubenswrapper[4663]: I1211 10:18:37.565888 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:37 crc kubenswrapper[4663]: I1211 10:18:37.565938 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:37 crc kubenswrapper[4663]: I1211 10:18:37.565948 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:37 crc kubenswrapper[4663]: I1211 10:18:37.565963 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:37 crc kubenswrapper[4663]: I1211 10:18:37.565974 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:37Z","lastTransitionTime":"2025-12-11T10:18:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:37 crc kubenswrapper[4663]: I1211 10:18:37.668108 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:37 crc kubenswrapper[4663]: I1211 10:18:37.668163 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:37 crc kubenswrapper[4663]: I1211 10:18:37.668199 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:37 crc kubenswrapper[4663]: I1211 10:18:37.668214 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:37 crc kubenswrapper[4663]: I1211 10:18:37.668225 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:37Z","lastTransitionTime":"2025-12-11T10:18:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 11 10:18:37 crc kubenswrapper[4663]: I1211 10:18:37.769950 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:37 crc kubenswrapper[4663]: I1211 10:18:37.769997 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:37 crc kubenswrapper[4663]: I1211 10:18:37.770009 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:37 crc kubenswrapper[4663]: I1211 10:18:37.770024 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:37 crc kubenswrapper[4663]: I1211 10:18:37.770034 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:37Z","lastTransitionTime":"2025-12-11T10:18:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:37 crc kubenswrapper[4663]: I1211 10:18:37.872263 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:37 crc kubenswrapper[4663]: I1211 10:18:37.872341 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:37 crc kubenswrapper[4663]: I1211 10:18:37.872351 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:37 crc kubenswrapper[4663]: I1211 10:18:37.872365 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:37 crc kubenswrapper[4663]: I1211 10:18:37.872376 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:37Z","lastTransitionTime":"2025-12-11T10:18:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:37 crc kubenswrapper[4663]: I1211 10:18:37.974913 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:37 crc kubenswrapper[4663]: I1211 10:18:37.974943 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:37 crc kubenswrapper[4663]: I1211 10:18:37.974950 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:37 crc kubenswrapper[4663]: I1211 10:18:37.974960 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:37 crc kubenswrapper[4663]: I1211 10:18:37.974968 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:37Z","lastTransitionTime":"2025-12-11T10:18:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.076964 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.077025 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.077034 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.077047 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.077076 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:38Z","lastTransitionTime":"2025-12-11T10:18:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.179585 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.179626 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.179635 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.179648 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.179658 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:38Z","lastTransitionTime":"2025-12-11T10:18:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.282900 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.282955 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.282963 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.282978 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.282987 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:38Z","lastTransitionTime":"2025-12-11T10:18:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.384142 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 11 10:18:38 crc kubenswrapper[4663]: E1211 10:18:38.384300 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 10:19:10.384275072 +0000 UTC m=+81.202204856 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.384338 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.384363 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.384393 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 11 10:18:38 crc kubenswrapper[4663]: E1211 10:18:38.384485 4663 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Dec 11 10:18:38 crc kubenswrapper[4663]: E1211 10:18:38.384492 4663 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 11 10:18:38 crc kubenswrapper[4663]: E1211 10:18:38.384532 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 10:19:10.384523778 +0000 UTC m=+81.202453563 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Dec 11 10:18:38 crc kubenswrapper[4663]: E1211 10:18:38.384556 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 10:19:10.384543786 +0000 UTC m=+81.202473569 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 11 10:18:38 crc kubenswrapper[4663]: E1211 10:18:38.384694 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 11 10:18:38 crc kubenswrapper[4663]: E1211 10:18:38.384727 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 11 10:18:38 crc kubenswrapper[4663]: E1211 10:18:38.384742 4663 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 11 10:18:38 crc kubenswrapper[4663]: E1211 10:18:38.384805 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-11 10:19:10.384778125 +0000 UTC m=+81.202707909 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.384980 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.385009 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.385017 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.385032 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.385054 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:38Z","lastTransitionTime":"2025-12-11T10:18:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.485318 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 11 10:18:38 crc kubenswrapper[4663]: E1211 10:18:38.485630 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 11 10:18:38 crc kubenswrapper[4663]: E1211 10:18:38.485668 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 11 10:18:38 crc kubenswrapper[4663]: E1211 10:18:38.485683 4663 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 11 10:18:38 crc kubenswrapper[4663]: E1211 10:18:38.485751 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-11 10:19:10.485735044 +0000 UTC m=+81.303664828 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.487136 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.487219 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.487231 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.487246 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.487256 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:38Z","lastTransitionTime":"2025-12-11T10:18:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.503616 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.503644 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 11 10:18:38 crc kubenswrapper[4663]: E1211 10:18:38.503737 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 11 10:18:38 crc kubenswrapper[4663]: E1211 10:18:38.503805 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.589670 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.589710 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.589718 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.589732 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.589741 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:38Z","lastTransitionTime":"2025-12-11T10:18:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.691503 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.691544 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.691552 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.691564 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.691573 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:38Z","lastTransitionTime":"2025-12-11T10:18:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.793427 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.793466 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.793476 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.793489 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.793500 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:38Z","lastTransitionTime":"2025-12-11T10:18:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.895506 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.895540 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.895548 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.895561 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.895570 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:38Z","lastTransitionTime":"2025-12-11T10:18:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.997483 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.997538 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.997551 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.997566 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:38 crc kubenswrapper[4663]: I1211 10:18:38.997578 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:38Z","lastTransitionTime":"2025-12-11T10:18:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.099650 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.099705 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.099715 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.099732 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.099743 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:39Z","lastTransitionTime":"2025-12-11T10:18:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.202131 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.202210 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.202219 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.202235 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.202245 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:39Z","lastTransitionTime":"2025-12-11T10:18:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.304411 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.304449 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.304458 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.304471 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.304481 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:39Z","lastTransitionTime":"2025-12-11T10:18:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.407216 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.407258 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.407267 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.407279 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.407289 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:39Z","lastTransitionTime":"2025-12-11T10:18:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.503670 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5r5q6"
Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.503734 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 11 10:18:39 crc kubenswrapper[4663]: E1211 10:18:39.503819 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5r5q6" podUID="54c64914-8d10-48bc-a755-1f7257400a32"
Dec 11 10:18:39 crc kubenswrapper[4663]: E1211 10:18:39.503875 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.509266 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.509292 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.509301 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.509314 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.509324 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:39Z","lastTransitionTime":"2025-12-11T10:18:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.516109 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:39Z is after 2025-08-24T17:21:41Z"
Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.524159 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8f5qw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7cf8637-c767-47eb-8d38-578340067566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3440496709e41014f785c5aadeba1bae72c4b733779039f7508fa2ca78144537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-646rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8f5qw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:39Z is after 2025-08-24T17:21:41Z"
Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.532421 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98941020-2764-490d-81db-9decd21aaa13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a72fbeafe047b24c15eb96a00eacf7ff119486159cd9cf32f63af409aecc529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed2238a7f3178a2c325e0f77a94934af6c907c13d0340e7f4c67b744669b56b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mq524\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:39Z is after 2025-08-24T17:21:41Z"
Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.541216 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-qv7jl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb85226c32cc03f4667a27f4596ce501400b323c88b347baab7574fe8f9e056c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4c2nm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-qv7jl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:39Z is after 2025-08-24T17:21:41Z"
Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.548422 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5r5q6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"54c64914-8d10-48bc-a755-1f7257400a32\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8wsjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8wsjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:20Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5r5q6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:39Z is after 2025-08-24T17:21:41Z"
Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.556687 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dc8119f8882cfe2688a3b38b4746f6f9f75764080053fae1f19edd7880464e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de08980167134ace33e7d6a626c4ee66e733e247e6e8d0f7e969105c12f41580\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:39Z is after 2025-08-24T17:21:41Z"
Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.565328 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8fc3491f01b999c50e166f30aef0a1c9d95bc6935a4b68cdac9bfbb70bce980\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:39Z is after 2025-08-24T17:21:41Z"
Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.576466 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:39Z is after 2025-08-24T17:21:41Z"
Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.585816 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:39Z is after 2025-08-24T17:21:41Z"
Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.596068 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4g9zj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27da1f1a-7a92-4c81-9889-21b44ea62db0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcf1553f9e95b87867fdf90f869863968036e47ec536a526cae63020cc9119fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef7bb22e51b47e9e49687153801d9a61be88291a5c4ae94f0d6c779c1eb779c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef7bb22e51b47e9e49687153801d9a61be88291a5c4ae94f0d6c779c1eb779c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28edb9dba08d7e8cccab85d649f75ce2c946ff8be7bb2de39e02432156e2ccf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28edb9dba08d7e8cccab85d649f75ce2c946ff8be7bb2de39e02432156e2ccf8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4g9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:39Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.603128 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8lh4c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0724e8ea-646f-422e-bc21-39189bddba17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14a19c708bdceab221701a9e228b35336aa133a021ee7a088800e9bec694e175\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4cn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8lh4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:39Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 
10:18:39.610782 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.610817 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.610826 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.610840 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.610852 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:39Z","lastTransitionTime":"2025-12-11T10:18:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.612168 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"82107b1a-b827-4c4a-b133-ce7e8ed73963\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf6437a10b018ecd7f7c1a23eb664377818d86ae958710d078a245878cd7d5a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52c5a9fef9792d964d1b4505a0e5857652454e088fbc8293524f644afd044507\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed406469f2fa49f8100c4c2c4adf80a173b1e5c8ab8581a4dc8c4d93fd557c7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b28b0a79fd6ef3045b5417a53b9e665faea306044a08b64362f5f573221dea8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:39Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.621363 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b934597e-ac43-4d21-82b1-c8d4260800e6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba7f98a60c12531c6766887ffac909316494552cc1acb036c5609e6400ac5b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d9bf7e6f64790700d3765c1935fa7eb13fa839a96617117bb226a8b86090fe9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83cef2539e49e0251b7cdab06b7a9002344f7ee41150f84b3f648372b4d50b55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a89c5b395c04b94c4476db7bf1edcfe175fb552dbbf4145a7031c0f86e139e9f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0068e2981a8c781e9563c1714bf0be7f35b5510f1fd7e52ab4cc83a0c0ec0c0e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"le observer\\\\nW1211 10:18:06.233120 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1211 10:18:06.233258 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 10:18:06.234798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2924789290/tls.crt::/tmp/serving-cert-2924789290/tls.key\\\\\\\"\\\\nI1211 10:18:06.570830 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 10:18:06.573023 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 10:18:06.573049 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 10:18:06.573076 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 10:18:06.573081 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 10:18:06.580393 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1211 10:18:06.580415 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580420 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580424 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 10:18:06.580426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 10:18:06.580429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 10:18:06.580432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1211 10:18:06.580522 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1211 10:18:06.581348 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1a0dcfabc032353060f8cb94fc8c601da6b20a3e2c35b84229bf66c9256537\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:39Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.629058 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9e980e59ce0e172d37122fdae971fb0123940795827553f646ae1a6283f523\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:39Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.641283 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f739f0f-4546-4db5-809d-435823288bd8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84fd59d33b64d1211bfe2754377534ed00ca078356eeb674fcd5d2d25761500b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec884ca4a06220486834fb7dad1a97dae6026a74
e1f4fa2c57ce504fd3f79a52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec884ca4a06220486834fb7dad1a97dae6026a74e1f4fa2c57ce504fd3f79a52\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T10:18:31Z\\\",\\\"message\\\":\\\"11 10:18:31.160898 6304 obj_retry.go:303] Retry object setup: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6\\\\nI1211 10:18:31.160908 6304 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6\\\\nF1211 10:18:31.160914 6304 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:31Z is after 2025-08-24T17:21:41Z]\\\\nI1211 10:18:31.160923 6304 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1211 10:18:31.160928 6304 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-z7882_openshift-ovn-kubernetes(4f739f0f-4546-4db5-809d-435823288bd8)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-z7882\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:39Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.649611 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11443ad0-297e-48c9-9c1e-3771584468ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c487fbed4984533659f3e1f3a79783d12b4e95706a529dc9cd24abb62adda34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-997ts
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d93647b4b0d3f4391ca6b565072c59f3f32be1433b5a043e3bb93395d529ebc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-997ts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:19Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hbks6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:39Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.712633 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.712674 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.712683 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.712696 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.712706 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:39Z","lastTransitionTime":"2025-12-11T10:18:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.815009    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.815056    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.815066    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.815080    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.815091    4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:39Z","lastTransitionTime":"2025-12-11T10:18:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.917384    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.917441    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.917450    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.917464    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:39 crc kubenswrapper[4663]: I1211 10:18:39.917473    4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:39Z","lastTransitionTime":"2025-12-11T10:18:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.019673    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.019705    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.019713    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.019724    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.019732    4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:40Z","lastTransitionTime":"2025-12-11T10:18:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.121995    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.122038    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.122051    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.122065    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.122074    4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:40Z","lastTransitionTime":"2025-12-11T10:18:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.223608    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.223641    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.223650    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.223662    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.223672    4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:40Z","lastTransitionTime":"2025-12-11T10:18:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.301744    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.301782    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.301792    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.301804    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.301812    4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:40Z","lastTransitionTime":"2025-12-11T10:18:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:40 crc kubenswrapper[4663]: E1211 10:18:40.310593 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a09a2675-5943-46f1-b4c2-67ff05e820a6\\\",\\\"systemUUID\\\":\\\"18e040fd-bd85-4c98-8f53-60800a512169\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:40Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.313211 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.313238 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.313247 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.313258 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.313266 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:40Z","lastTransitionTime":"2025-12-11T10:18:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:40 crc kubenswrapper[4663]: E1211 10:18:40.320726 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a09a2675-5943-46f1-b4c2-67ff05e820a6\\\",\\\"systemUUID\\\":\\\"18e040fd-bd85-4c98-8f53-60800a512169\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:40Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.322920 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.322974 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.322984 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.322997 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.323006 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:40Z","lastTransitionTime":"2025-12-11T10:18:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:40 crc kubenswrapper[4663]: E1211 10:18:40.330968 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a09a2675-5943-46f1-b4c2-67ff05e820a6\\\",\\\"systemUUID\\\":\\\"18e040fd-bd85-4c98-8f53-60800a512169\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:40Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.333523 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.333574 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.333584 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.333595 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.333603 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:40Z","lastTransitionTime":"2025-12-11T10:18:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:40 crc kubenswrapper[4663]: E1211 10:18:40.341360 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a09a2675-5943-46f1-b4c2-67ff05e820a6\\\",\\\"systemUUID\\\":\\\"18e040fd-bd85-4c98-8f53-60800a512169\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:40Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.343161 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.343199 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.343209 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.343221 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.343229 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:40Z","lastTransitionTime":"2025-12-11T10:18:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:40 crc kubenswrapper[4663]: E1211 10:18:40.350802 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a09a2675-5943-46f1-b4c2-67ff05e820a6\\\",\\\"systemUUID\\\":\\\"18e040fd-bd85-4c98-8f53-60800a512169\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:40Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:40 crc kubenswrapper[4663]: E1211 10:18:40.350927 4663 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.351814 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.351857 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.351882 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.351891 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.351899 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:40Z","lastTransitionTime":"2025-12-11T10:18:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.454052 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.454089 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.454097 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.454107 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.454115 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:40Z","lastTransitionTime":"2025-12-11T10:18:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.503733 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.503752 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:18:40 crc kubenswrapper[4663]: E1211 10:18:40.503825 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 10:18:40 crc kubenswrapper[4663]: E1211 10:18:40.503880 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.556580 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.556609 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.556620 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.556632 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.556640 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:40Z","lastTransitionTime":"2025-12-11T10:18:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.658624 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.658659 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.658668 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.658681 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.658691 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:40Z","lastTransitionTime":"2025-12-11T10:18:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.760174 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.760237 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.760246 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.760261 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.760270 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:40Z","lastTransitionTime":"2025-12-11T10:18:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.862059 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.862101 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.862109 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.862120 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.862130 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:40Z","lastTransitionTime":"2025-12-11T10:18:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.964422 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.964480 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.964490 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.964502 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:40 crc kubenswrapper[4663]: I1211 10:18:40.964517 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:40Z","lastTransitionTime":"2025-12-11T10:18:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.066529 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.066558 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.066566 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.066577 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.066585 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:41Z","lastTransitionTime":"2025-12-11T10:18:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.169149 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.169208 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.169218 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.169231 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.169242 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:41Z","lastTransitionTime":"2025-12-11T10:18:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.271597 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.271629 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.271639 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.271650 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.271657 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:41Z","lastTransitionTime":"2025-12-11T10:18:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.373856 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.373884 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.373924 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.373937 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.373946 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:41Z","lastTransitionTime":"2025-12-11T10:18:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.475694 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.475737 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.475748 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.475762 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.475771 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:41Z","lastTransitionTime":"2025-12-11T10:18:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.503849 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.503862 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:18:41 crc kubenswrapper[4663]: E1211 10:18:41.503980 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-5r5q6" podUID="54c64914-8d10-48bc-a755-1f7257400a32" Dec 11 10:18:41 crc kubenswrapper[4663]: E1211 10:18:41.504068 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.557693 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.563869 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.566007 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11443ad0-297e-48c9-9c1e-3771584468ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c487fbed4984533659f3e1f3a79783d12b4e95706a529dc9cd24abb62adda34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-997ts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d93647b4b0d3f4391ca6b565072c59f3f32be1433b5a043e3bb93395d529ebc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\
\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-997ts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:19Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hbks6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:41Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.575007 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b934597e-ac43-4d21-82b1-c8d4260800e6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba7f98a60c12531c6766887ffac909316494552cc1acb036c5609e6400ac5b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d9bf7e6f64790700d3765c1935fa7eb13fa839a96617117bb226a8b86090fe9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\
\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83cef2539e49e0251b7cdab06b7a9002344f7ee41150f84b3f648372b4d50b55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a89c5b395c04b94c4476db7bf1edcfe175fb552dbbf4145a7031c0f86e139e9f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0068e2981a8c781e9563c1714bf0be7f35b5510f1fd7e52ab4cc83a0c0ec0c0e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"le observer\\\\nW1211 10:18:06.233120 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1211 10:18:06.233258 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 10:18:06.234798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2924789290/tls.crt::/tmp/serving-cert-2924789290/tls.key\\\\\\\"\\\\nI1211 10:18:06.570830 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 10:18:06.573023 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 10:18:06.573049 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 10:18:06.573076 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 10:18:06.573081 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 10:18:06.580393 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1211 10:18:06.580415 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580420 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580424 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 10:18:06.580426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 10:18:06.580429 1 secure_serving.go:69] Use 
of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 10:18:06.580432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1211 10:18:06.580522 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1211 10:18:06.581348 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1a0dcfabc032353060f8cb94fc8c601da6b20a3e2c35b84229bf66c9256537\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:41Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.577331 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.577373 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.577384 4663 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.577397 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.577405 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:41Z","lastTransitionTime":"2025-12-11T10:18:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.584058 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9e980e59ce0e172d37122fdae971fb0123940795827553f646ae1a6283f523\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:41Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.595903 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f739f0f-4546-4db5-809d-435823288bd8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84fd59d33b64d1211bfe2754377534ed00ca078356eeb674fcd5d2d25761500b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec884ca4a06220486834fb7dad1a97dae6026a74e1f4fa2c57ce504fd3f79a52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec884ca4a06220486834fb7dad1a97dae6026a74e1f4fa2c57ce504fd3f79a52\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T10:18:31Z\\\",\\\"message\\\":\\\"11 10:18:31.160898 6304 obj_retry.go:303] Retry object setup: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6\\\\nI1211 10:18:31.160908 6304 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6\\\\nF1211 10:18:31.160914 6304 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:31Z is after 2025-08-24T17:21:41Z]\\\\nI1211 10:18:31.160923 6304 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1211 10:18:31.160928 6304 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-z7882_openshift-ovn-kubernetes(4f739f0f-4546-4db5-809d-435823288bd8)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-z7882\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:41Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.604922 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-qv7jl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb85226c32cc03f4667a27f4596ce501400b323c88b347baab7574fe8f9e056c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-
cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4c2nm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-qv7jl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:41Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.611533 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5r5q6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"54c64914-8d10-48bc-a755-1f7257400a32\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8wsjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8wsjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:20Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5r5q6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:41Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.619307 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dc8119f8882cfe2688a3b38b4746f6f9f75764080053fae1f19edd7880464e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de08980167134ace33e7d6a626c4ee66e733e247e6e8d0f7e969105c12f41580\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:41Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.626701 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:41Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.632936 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8f5qw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7cf8637-c767-47eb-8d38-578340067566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3440496709e41014f785c5aadeba1bae72c4b733779039f7508fa2ca78144537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-646rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8f5qw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:41Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.639649 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"98941020-2764-490d-81db-9decd21aaa13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a72fbeafe047b24c15eb96a00eacf7ff119486159cd9cf32f63af409aecc529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed2238a7f3178a2c325e0f77a94934af6c907c13d0340e7f4c67b744669b56b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mq524\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:41Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.649260 4663 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4g9zj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27da1f1a-7a92-4c81-9889-21b44ea62db0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcf1553f9e95b87867fdf90f869863968036e47ec536a526cae63020cc9119fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef7bb22e51b47e9e49687153801d9a61be88291a5c4ae94f0d6c779c1eb779c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef7bb22e51b47e9e49687153801d9a61be88291a5c4ae94f0d6c779c1eb779c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28edb9dba08d7e8cccab85d649f75ce2c946ff8be7bb2de39e02432156e2ccf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28edb9dba08d7e8cccab85d649f75ce2c946ff8be7bb2de39e02432156e2ccf8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4g9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:41Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.655799 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8lh4c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0724e8ea-646f-422e-bc21-39189bddba17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14a19c708bdceab221701a9e228b35336aa133a021ee7a088800e9bec694e175\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4cn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8lh4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:41Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.663595 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"82107b1a-b827-4c4a-b133-ce7e8ed73963\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf6437a10b018ecd7f7c1a23eb664377818d86ae958710d078a245878cd7d5a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52c5a9fef9792d964d1b4505a0e5857652454e088fbc8293524f644afd044507\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed406469f2fa49f8100c4c2c4adf80a173b1e5c8ab8581a4dc8c4d93fd557c7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b28b0a79fd6ef3045b5417a53b9e665faea306044a08b64362f5f573221dea8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:41Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.671873 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8fc3491f01b999c50e166f30aef0a1c9d95bc6935a4b68cdac9bfbb70bce980\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:41Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.679616 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.679638 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.679647 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.679659 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.679668 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:41Z","lastTransitionTime":"2025-12-11T10:18:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.679733 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:41Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.688363 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:41Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.781565 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.781589 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.781599 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.781609 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.781617 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:41Z","lastTransitionTime":"2025-12-11T10:18:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.882696 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.882718 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.882727 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.882737 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.882746 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:41Z","lastTransitionTime":"2025-12-11T10:18:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.984344 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.984374 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.984383 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.984394 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:41 crc kubenswrapper[4663]: I1211 10:18:41.984403 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:41Z","lastTransitionTime":"2025-12-11T10:18:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:42 crc kubenswrapper[4663]: I1211 10:18:42.086313 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:42 crc kubenswrapper[4663]: I1211 10:18:42.086399 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:42 crc kubenswrapper[4663]: I1211 10:18:42.086423 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:42 crc kubenswrapper[4663]: I1211 10:18:42.086440 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:42 crc kubenswrapper[4663]: I1211 10:18:42.086451 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:42Z","lastTransitionTime":"2025-12-11T10:18:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:42 crc kubenswrapper[4663]: I1211 10:18:42.188151 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:42 crc kubenswrapper[4663]: I1211 10:18:42.188199 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:42 crc kubenswrapper[4663]: I1211 10:18:42.188209 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:42 crc kubenswrapper[4663]: I1211 10:18:42.188220 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:42 crc kubenswrapper[4663]: I1211 10:18:42.188228 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:42Z","lastTransitionTime":"2025-12-11T10:18:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:42 crc kubenswrapper[4663]: I1211 10:18:42.289900 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:42 crc kubenswrapper[4663]: I1211 10:18:42.289936 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:42 crc kubenswrapper[4663]: I1211 10:18:42.289946 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:42 crc kubenswrapper[4663]: I1211 10:18:42.289958 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:42 crc kubenswrapper[4663]: I1211 10:18:42.289967 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:42Z","lastTransitionTime":"2025-12-11T10:18:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:42 crc kubenswrapper[4663]: I1211 10:18:42.392488 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:42 crc kubenswrapper[4663]: I1211 10:18:42.392522 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:42 crc kubenswrapper[4663]: I1211 10:18:42.392532 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:42 crc kubenswrapper[4663]: I1211 10:18:42.392544 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:42 crc kubenswrapper[4663]: I1211 10:18:42.392552 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:42Z","lastTransitionTime":"2025-12-11T10:18:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:42 crc kubenswrapper[4663]: I1211 10:18:42.494052 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:42 crc kubenswrapper[4663]: I1211 10:18:42.494103 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:42 crc kubenswrapper[4663]: I1211 10:18:42.494113 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:42 crc kubenswrapper[4663]: I1211 10:18:42.494128 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:42 crc kubenswrapper[4663]: I1211 10:18:42.494138 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:42Z","lastTransitionTime":"2025-12-11T10:18:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:42 crc kubenswrapper[4663]: I1211 10:18:42.504281 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:18:42 crc kubenswrapper[4663]: E1211 10:18:42.504449 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 10:18:42 crc kubenswrapper[4663]: I1211 10:18:42.504285 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:18:42 crc kubenswrapper[4663]: E1211 10:18:42.504627 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 10:18:42 crc kubenswrapper[4663]: I1211 10:18:42.596288 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:42 crc kubenswrapper[4663]: I1211 10:18:42.596329 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:42 crc kubenswrapper[4663]: I1211 10:18:42.596339 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:42 crc kubenswrapper[4663]: I1211 10:18:42.596352 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:42 crc kubenswrapper[4663]: I1211 10:18:42.596363 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:42Z","lastTransitionTime":"2025-12-11T10:18:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:42 crc kubenswrapper[4663]: I1211 10:18:42.699285 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:42 crc kubenswrapper[4663]: I1211 10:18:42.699329 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:42 crc kubenswrapper[4663]: I1211 10:18:42.699341 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:42 crc kubenswrapper[4663]: I1211 10:18:42.699357 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:42 crc kubenswrapper[4663]: I1211 10:18:42.699370 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:42Z","lastTransitionTime":"2025-12-11T10:18:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:42 crc kubenswrapper[4663]: I1211 10:18:42.800798 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:42 crc kubenswrapper[4663]: I1211 10:18:42.800831 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:42 crc kubenswrapper[4663]: I1211 10:18:42.800848 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:42 crc kubenswrapper[4663]: I1211 10:18:42.800859 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:42 crc kubenswrapper[4663]: I1211 10:18:42.800867 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:42Z","lastTransitionTime":"2025-12-11T10:18:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:42 crc kubenswrapper[4663]: I1211 10:18:42.902399 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:42 crc kubenswrapper[4663]: I1211 10:18:42.902434 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:42 crc kubenswrapper[4663]: I1211 10:18:42.902443 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:42 crc kubenswrapper[4663]: I1211 10:18:42.902455 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:42 crc kubenswrapper[4663]: I1211 10:18:42.902464 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:42Z","lastTransitionTime":"2025-12-11T10:18:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 11 10:18:43 crc kubenswrapper[4663]: I1211 10:18:43.004542 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:43 crc kubenswrapper[4663]: I1211 10:18:43.004578 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:43 crc kubenswrapper[4663]: I1211 10:18:43.004586 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:43 crc kubenswrapper[4663]: I1211 10:18:43.004599 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:43 crc kubenswrapper[4663]: I1211 10:18:43.004607 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:43Z","lastTransitionTime":"2025-12-11T10:18:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:43 crc kubenswrapper[4663]: I1211 10:18:43.106714 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:43 crc kubenswrapper[4663]: I1211 10:18:43.106740 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:43 crc kubenswrapper[4663]: I1211 10:18:43.106749 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:43 crc kubenswrapper[4663]: I1211 10:18:43.106759 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:43 crc kubenswrapper[4663]: I1211 10:18:43.106767 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:43Z","lastTransitionTime":"2025-12-11T10:18:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:43 crc kubenswrapper[4663]: I1211 10:18:43.208288 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:43 crc kubenswrapper[4663]: I1211 10:18:43.208325 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:43 crc kubenswrapper[4663]: I1211 10:18:43.208334 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:43 crc kubenswrapper[4663]: I1211 10:18:43.208344 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:43 crc kubenswrapper[4663]: I1211 10:18:43.208353 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:43Z","lastTransitionTime":"2025-12-11T10:18:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:43 crc kubenswrapper[4663]: I1211 10:18:43.310286 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:43 crc kubenswrapper[4663]: I1211 10:18:43.310315 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:43 crc kubenswrapper[4663]: I1211 10:18:43.310324 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:43 crc kubenswrapper[4663]: I1211 10:18:43.310336 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:43 crc kubenswrapper[4663]: I1211 10:18:43.310346 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:43Z","lastTransitionTime":"2025-12-11T10:18:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:43 crc kubenswrapper[4663]: I1211 10:18:43.411945 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:43 crc kubenswrapper[4663]: I1211 10:18:43.411977 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:43 crc kubenswrapper[4663]: I1211 10:18:43.411985 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:43 crc kubenswrapper[4663]: I1211 10:18:43.411997 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:43 crc kubenswrapper[4663]: I1211 10:18:43.412005 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:43Z","lastTransitionTime":"2025-12-11T10:18:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:43 crc kubenswrapper[4663]: I1211 10:18:43.503997 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 11 10:18:43 crc kubenswrapper[4663]: E1211 10:18:43.504106 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 11 10:18:43 crc kubenswrapper[4663]: I1211 10:18:43.503997 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5r5q6"
Dec 11 10:18:43 crc kubenswrapper[4663]: E1211 10:18:43.504170 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5r5q6" podUID="54c64914-8d10-48bc-a755-1f7257400a32"
Dec 11 10:18:43 crc kubenswrapper[4663]: I1211 10:18:43.513571 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:43 crc kubenswrapper[4663]: I1211 10:18:43.513624 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:43 crc kubenswrapper[4663]: I1211 10:18:43.513633 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:43 crc kubenswrapper[4663]: I1211 10:18:43.513649 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:43 crc kubenswrapper[4663]: I1211 10:18:43.513659 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:43Z","lastTransitionTime":"2025-12-11T10:18:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:43 crc kubenswrapper[4663]: I1211 10:18:43.615479 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:43 crc kubenswrapper[4663]: I1211 10:18:43.615520 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:43 crc kubenswrapper[4663]: I1211 10:18:43.615528 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:43 crc kubenswrapper[4663]: I1211 10:18:43.615542 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:43 crc kubenswrapper[4663]: I1211 10:18:43.615553 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:43Z","lastTransitionTime":"2025-12-11T10:18:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:43 crc kubenswrapper[4663]: I1211 10:18:43.717416 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:43 crc kubenswrapper[4663]: I1211 10:18:43.717449 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:43 crc kubenswrapper[4663]: I1211 10:18:43.717460 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:43 crc kubenswrapper[4663]: I1211 10:18:43.717473 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:43 crc kubenswrapper[4663]: I1211 10:18:43.717481 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:43Z","lastTransitionTime":"2025-12-11T10:18:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:43 crc kubenswrapper[4663]: I1211 10:18:43.819302 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:43 crc kubenswrapper[4663]: I1211 10:18:43.819340 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:43 crc kubenswrapper[4663]: I1211 10:18:43.819350 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:43 crc kubenswrapper[4663]: I1211 10:18:43.819364 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:43 crc kubenswrapper[4663]: I1211 10:18:43.819373 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:43Z","lastTransitionTime":"2025-12-11T10:18:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:43 crc kubenswrapper[4663]: I1211 10:18:43.921498 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:43 crc kubenswrapper[4663]: I1211 10:18:43.921542 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:43 crc kubenswrapper[4663]: I1211 10:18:43.921552 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:43 crc kubenswrapper[4663]: I1211 10:18:43.921564 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:43 crc kubenswrapper[4663]: I1211 10:18:43.921572 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:43Z","lastTransitionTime":"2025-12-11T10:18:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:44 crc kubenswrapper[4663]: I1211 10:18:44.023374 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:44 crc kubenswrapper[4663]: I1211 10:18:44.023401 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:44 crc kubenswrapper[4663]: I1211 10:18:44.023409 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:44 crc kubenswrapper[4663]: I1211 10:18:44.023421 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:44 crc kubenswrapper[4663]: I1211 10:18:44.023429 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:44Z","lastTransitionTime":"2025-12-11T10:18:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:44 crc kubenswrapper[4663]: I1211 10:18:44.125060 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:44 crc kubenswrapper[4663]: I1211 10:18:44.125097 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:44 crc kubenswrapper[4663]: I1211 10:18:44.125106 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:44 crc kubenswrapper[4663]: I1211 10:18:44.125117 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:44 crc kubenswrapper[4663]: I1211 10:18:44.125127 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:44Z","lastTransitionTime":"2025-12-11T10:18:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:44 crc kubenswrapper[4663]: I1211 10:18:44.227094 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:44 crc kubenswrapper[4663]: I1211 10:18:44.227124 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:44 crc kubenswrapper[4663]: I1211 10:18:44.227135 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:44 crc kubenswrapper[4663]: I1211 10:18:44.227146 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:44 crc kubenswrapper[4663]: I1211 10:18:44.227156 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:44Z","lastTransitionTime":"2025-12-11T10:18:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:44 crc kubenswrapper[4663]: I1211 10:18:44.329406 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:44 crc kubenswrapper[4663]: I1211 10:18:44.329443 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:44 crc kubenswrapper[4663]: I1211 10:18:44.329453 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:44 crc kubenswrapper[4663]: I1211 10:18:44.329473 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:44 crc kubenswrapper[4663]: I1211 10:18:44.329482 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:44Z","lastTransitionTime":"2025-12-11T10:18:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:44 crc kubenswrapper[4663]: I1211 10:18:44.431724 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:44 crc kubenswrapper[4663]: I1211 10:18:44.431758 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:44 crc kubenswrapper[4663]: I1211 10:18:44.431777 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:44 crc kubenswrapper[4663]: I1211 10:18:44.431790 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:44 crc kubenswrapper[4663]: I1211 10:18:44.431803 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:44Z","lastTransitionTime":"2025-12-11T10:18:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:44 crc kubenswrapper[4663]: I1211 10:18:44.503670 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 11 10:18:44 crc kubenswrapper[4663]: I1211 10:18:44.503683 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 11 10:18:44 crc kubenswrapper[4663]: E1211 10:18:44.503805 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 11 10:18:44 crc kubenswrapper[4663]: E1211 10:18:44.503877 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 11 10:18:44 crc kubenswrapper[4663]: I1211 10:18:44.533993 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:44 crc kubenswrapper[4663]: I1211 10:18:44.534021 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:44 crc kubenswrapper[4663]: I1211 10:18:44.534030 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:44 crc kubenswrapper[4663]: I1211 10:18:44.534041 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:44 crc kubenswrapper[4663]: I1211 10:18:44.534050 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:44Z","lastTransitionTime":"2025-12-11T10:18:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:44 crc kubenswrapper[4663]: I1211 10:18:44.635721 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:44 crc kubenswrapper[4663]: I1211 10:18:44.635763 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:44 crc kubenswrapper[4663]: I1211 10:18:44.635772 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:44 crc kubenswrapper[4663]: I1211 10:18:44.635784 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:44 crc kubenswrapper[4663]: I1211 10:18:44.635793 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:44Z","lastTransitionTime":"2025-12-11T10:18:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:44 crc kubenswrapper[4663]: I1211 10:18:44.737804 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:44 crc kubenswrapper[4663]: I1211 10:18:44.737842 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:44 crc kubenswrapper[4663]: I1211 10:18:44.737851 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:44 crc kubenswrapper[4663]: I1211 10:18:44.737865 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:44 crc kubenswrapper[4663]: I1211 10:18:44.737874 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:44Z","lastTransitionTime":"2025-12-11T10:18:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:44 crc kubenswrapper[4663]: I1211 10:18:44.840041 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:44 crc kubenswrapper[4663]: I1211 10:18:44.840071 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:44 crc kubenswrapper[4663]: I1211 10:18:44.840080 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:44 crc kubenswrapper[4663]: I1211 10:18:44.840093 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:44 crc kubenswrapper[4663]: I1211 10:18:44.840103 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:44Z","lastTransitionTime":"2025-12-11T10:18:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:44 crc kubenswrapper[4663]: I1211 10:18:44.941465 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:44 crc kubenswrapper[4663]: I1211 10:18:44.941494 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:44 crc kubenswrapper[4663]: I1211 10:18:44.941502 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:44 crc kubenswrapper[4663]: I1211 10:18:44.941513 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:44 crc kubenswrapper[4663]: I1211 10:18:44.941521 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:44Z","lastTransitionTime":"2025-12-11T10:18:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:45 crc kubenswrapper[4663]: I1211 10:18:45.043113 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:45 crc kubenswrapper[4663]: I1211 10:18:45.043152 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:45 crc kubenswrapper[4663]: I1211 10:18:45.043161 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:45 crc kubenswrapper[4663]: I1211 10:18:45.043174 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:45 crc kubenswrapper[4663]: I1211 10:18:45.043202 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:45Z","lastTransitionTime":"2025-12-11T10:18:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:45 crc kubenswrapper[4663]: I1211 10:18:45.145129 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:45 crc kubenswrapper[4663]: I1211 10:18:45.145163 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:45 crc kubenswrapper[4663]: I1211 10:18:45.145172 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:45 crc kubenswrapper[4663]: I1211 10:18:45.145204 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:45 crc kubenswrapper[4663]: I1211 10:18:45.145213 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:45Z","lastTransitionTime":"2025-12-11T10:18:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:45 crc kubenswrapper[4663]: I1211 10:18:45.247442 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:45 crc kubenswrapper[4663]: I1211 10:18:45.247476 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:45 crc kubenswrapper[4663]: I1211 10:18:45.247484 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:45 crc kubenswrapper[4663]: I1211 10:18:45.247500 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:45 crc kubenswrapper[4663]: I1211 10:18:45.247508 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:45Z","lastTransitionTime":"2025-12-11T10:18:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:45 crc kubenswrapper[4663]: I1211 10:18:45.350386 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:45 crc kubenswrapper[4663]: I1211 10:18:45.350415 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:45 crc kubenswrapper[4663]: I1211 10:18:45.350424 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:45 crc kubenswrapper[4663]: I1211 10:18:45.350440 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:45 crc kubenswrapper[4663]: I1211 10:18:45.350450 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:45Z","lastTransitionTime":"2025-12-11T10:18:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:45 crc kubenswrapper[4663]: I1211 10:18:45.453330 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:45 crc kubenswrapper[4663]: I1211 10:18:45.453379 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:45 crc kubenswrapper[4663]: I1211 10:18:45.453388 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:45 crc kubenswrapper[4663]: I1211 10:18:45.453403 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:45 crc kubenswrapper[4663]: I1211 10:18:45.453414 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:45Z","lastTransitionTime":"2025-12-11T10:18:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:45 crc kubenswrapper[4663]: I1211 10:18:45.504350 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 11 10:18:45 crc kubenswrapper[4663]: I1211 10:18:45.504512 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5r5q6"
Dec 11 10:18:45 crc kubenswrapper[4663]: E1211 10:18:45.504785 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 11 10:18:45 crc kubenswrapper[4663]: E1211 10:18:45.504875 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5r5q6" podUID="54c64914-8d10-48bc-a755-1f7257400a32"
Dec 11 10:18:45 crc kubenswrapper[4663]: I1211 10:18:45.555131 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:45 crc kubenswrapper[4663]: I1211 10:18:45.555168 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:45 crc kubenswrapper[4663]: I1211 10:18:45.555176 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:45 crc kubenswrapper[4663]: I1211 10:18:45.555209 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:45 crc kubenswrapper[4663]: I1211 10:18:45.555218 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:45Z","lastTransitionTime":"2025-12-11T10:18:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:45 crc kubenswrapper[4663]: I1211 10:18:45.657551 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:45 crc kubenswrapper[4663]: I1211 10:18:45.657591 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:45 crc kubenswrapper[4663]: I1211 10:18:45.657599 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:45 crc kubenswrapper[4663]: I1211 10:18:45.657612 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:45 crc kubenswrapper[4663]: I1211 10:18:45.657620 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:45Z","lastTransitionTime":"2025-12-11T10:18:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:45 crc kubenswrapper[4663]: I1211 10:18:45.759756 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:45 crc kubenswrapper[4663]: I1211 10:18:45.759791 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:45 crc kubenswrapper[4663]: I1211 10:18:45.759801 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:45 crc kubenswrapper[4663]: I1211 10:18:45.759815 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:45 crc kubenswrapper[4663]: I1211 10:18:45.759824 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:45Z","lastTransitionTime":"2025-12-11T10:18:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:45 crc kubenswrapper[4663]: I1211 10:18:45.861974 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:45 crc kubenswrapper[4663]: I1211 10:18:45.862035 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:45 crc kubenswrapper[4663]: I1211 10:18:45.862044 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:45 crc kubenswrapper[4663]: I1211 10:18:45.862058 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:45 crc kubenswrapper[4663]: I1211 10:18:45.862100 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:45Z","lastTransitionTime":"2025-12-11T10:18:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:45 crc kubenswrapper[4663]: I1211 10:18:45.964712 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:45 crc kubenswrapper[4663]: I1211 10:18:45.964767 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:45 crc kubenswrapper[4663]: I1211 10:18:45.964775 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:45 crc kubenswrapper[4663]: I1211 10:18:45.964791 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:45 crc kubenswrapper[4663]: I1211 10:18:45.964802 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:45Z","lastTransitionTime":"2025-12-11T10:18:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:46 crc kubenswrapper[4663]: I1211 10:18:46.066674 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:46 crc kubenswrapper[4663]: I1211 10:18:46.066731 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:46 crc kubenswrapper[4663]: I1211 10:18:46.066739 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:46 crc kubenswrapper[4663]: I1211 10:18:46.066752 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:46 crc kubenswrapper[4663]: I1211 10:18:46.066761 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:46Z","lastTransitionTime":"2025-12-11T10:18:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:46 crc kubenswrapper[4663]: I1211 10:18:46.168614 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:46 crc kubenswrapper[4663]: I1211 10:18:46.168663 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:46 crc kubenswrapper[4663]: I1211 10:18:46.168674 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:46 crc kubenswrapper[4663]: I1211 10:18:46.168699 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:46 crc kubenswrapper[4663]: I1211 10:18:46.168708 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:46Z","lastTransitionTime":"2025-12-11T10:18:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:46 crc kubenswrapper[4663]: I1211 10:18:46.270573 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:46 crc kubenswrapper[4663]: I1211 10:18:46.270626 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:46 crc kubenswrapper[4663]: I1211 10:18:46.270637 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:46 crc kubenswrapper[4663]: I1211 10:18:46.270651 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:46 crc kubenswrapper[4663]: I1211 10:18:46.270662 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:46Z","lastTransitionTime":"2025-12-11T10:18:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:46 crc kubenswrapper[4663]: I1211 10:18:46.372517 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:46 crc kubenswrapper[4663]: I1211 10:18:46.372552 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:46 crc kubenswrapper[4663]: I1211 10:18:46.372561 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:46 crc kubenswrapper[4663]: I1211 10:18:46.372573 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:46 crc kubenswrapper[4663]: I1211 10:18:46.372582 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:46Z","lastTransitionTime":"2025-12-11T10:18:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:46 crc kubenswrapper[4663]: I1211 10:18:46.475153 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:46 crc kubenswrapper[4663]: I1211 10:18:46.475209 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:46 crc kubenswrapper[4663]: I1211 10:18:46.475218 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:46 crc kubenswrapper[4663]: I1211 10:18:46.475233 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:46 crc kubenswrapper[4663]: I1211 10:18:46.475243 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:46Z","lastTransitionTime":"2025-12-11T10:18:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:46 crc kubenswrapper[4663]: I1211 10:18:46.503955 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 11 10:18:46 crc kubenswrapper[4663]: I1211 10:18:46.503964 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 11 10:18:46 crc kubenswrapper[4663]: E1211 10:18:46.504096 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 11 10:18:46 crc kubenswrapper[4663]: E1211 10:18:46.504179 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 11 10:18:46 crc kubenswrapper[4663]: I1211 10:18:46.577340 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:46 crc kubenswrapper[4663]: I1211 10:18:46.577385 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:46 crc kubenswrapper[4663]: I1211 10:18:46.577394 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:46 crc kubenswrapper[4663]: I1211 10:18:46.577408 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:46 crc kubenswrapper[4663]: I1211 10:18:46.577418 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:46Z","lastTransitionTime":"2025-12-11T10:18:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:46 crc kubenswrapper[4663]: I1211 10:18:46.679320 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:46 crc kubenswrapper[4663]: I1211 10:18:46.679363 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:46 crc kubenswrapper[4663]: I1211 10:18:46.679372 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:46 crc kubenswrapper[4663]: I1211 10:18:46.679385 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:46 crc kubenswrapper[4663]: I1211 10:18:46.679394 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:46Z","lastTransitionTime":"2025-12-11T10:18:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:46 crc kubenswrapper[4663]: I1211 10:18:46.781282 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:46 crc kubenswrapper[4663]: I1211 10:18:46.781336 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:46 crc kubenswrapper[4663]: I1211 10:18:46.781345 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:46 crc kubenswrapper[4663]: I1211 10:18:46.781356 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:46 crc kubenswrapper[4663]: I1211 10:18:46.781366 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:46Z","lastTransitionTime":"2025-12-11T10:18:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:46 crc kubenswrapper[4663]: I1211 10:18:46.883983 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:46 crc kubenswrapper[4663]: I1211 10:18:46.884022 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:46 crc kubenswrapper[4663]: I1211 10:18:46.884031 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:46 crc kubenswrapper[4663]: I1211 10:18:46.884043 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:46 crc kubenswrapper[4663]: I1211 10:18:46.884052 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:46Z","lastTransitionTime":"2025-12-11T10:18:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:46 crc kubenswrapper[4663]: I1211 10:18:46.985631 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:46 crc kubenswrapper[4663]: I1211 10:18:46.985690 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:46 crc kubenswrapper[4663]: I1211 10:18:46.985701 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:46 crc kubenswrapper[4663]: I1211 10:18:46.985713 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:46 crc kubenswrapper[4663]: I1211 10:18:46.985721 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:46Z","lastTransitionTime":"2025-12-11T10:18:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:47 crc kubenswrapper[4663]: I1211 10:18:47.087758 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:47 crc kubenswrapper[4663]: I1211 10:18:47.087806 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:47 crc kubenswrapper[4663]: I1211 10:18:47.087817 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:47 crc kubenswrapper[4663]: I1211 10:18:47.087832 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:47 crc kubenswrapper[4663]: I1211 10:18:47.087842 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:47Z","lastTransitionTime":"2025-12-11T10:18:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:47 crc kubenswrapper[4663]: I1211 10:18:47.189775 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:47 crc kubenswrapper[4663]: I1211 10:18:47.189813 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:47 crc kubenswrapper[4663]: I1211 10:18:47.189821 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:47 crc kubenswrapper[4663]: I1211 10:18:47.189833 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:47 crc kubenswrapper[4663]: I1211 10:18:47.189841 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:47Z","lastTransitionTime":"2025-12-11T10:18:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:47 crc kubenswrapper[4663]: I1211 10:18:47.291760 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:47 crc kubenswrapper[4663]: I1211 10:18:47.291806 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:47 crc kubenswrapper[4663]: I1211 10:18:47.291817 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:47 crc kubenswrapper[4663]: I1211 10:18:47.291831 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:47 crc kubenswrapper[4663]: I1211 10:18:47.291841 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:47Z","lastTransitionTime":"2025-12-11T10:18:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:47 crc kubenswrapper[4663]: I1211 10:18:47.393668 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:47 crc kubenswrapper[4663]: I1211 10:18:47.393707 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:47 crc kubenswrapper[4663]: I1211 10:18:47.393716 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:47 crc kubenswrapper[4663]: I1211 10:18:47.393730 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:47 crc kubenswrapper[4663]: I1211 10:18:47.393740 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:47Z","lastTransitionTime":"2025-12-11T10:18:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:47 crc kubenswrapper[4663]: I1211 10:18:47.495502 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:47 crc kubenswrapper[4663]: I1211 10:18:47.495551 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:47 crc kubenswrapper[4663]: I1211 10:18:47.495561 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:47 crc kubenswrapper[4663]: I1211 10:18:47.495575 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:47 crc kubenswrapper[4663]: I1211 10:18:47.495584 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:47Z","lastTransitionTime":"2025-12-11T10:18:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:47 crc kubenswrapper[4663]: I1211 10:18:47.503973 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5r5q6"
Dec 11 10:18:47 crc kubenswrapper[4663]: I1211 10:18:47.504170 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 11 10:18:47 crc kubenswrapper[4663]: E1211 10:18:47.504256 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5r5q6" podUID="54c64914-8d10-48bc-a755-1f7257400a32"
Dec 11 10:18:47 crc kubenswrapper[4663]: E1211 10:18:47.504364 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 11 10:18:47 crc kubenswrapper[4663]: I1211 10:18:47.504509 4663 scope.go:117] "RemoveContainer" containerID="ec884ca4a06220486834fb7dad1a97dae6026a74e1f4fa2c57ce504fd3f79a52"
Dec 11 10:18:47 crc kubenswrapper[4663]: E1211 10:18:47.504663 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-z7882_openshift-ovn-kubernetes(4f739f0f-4546-4db5-809d-435823288bd8)\"" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" podUID="4f739f0f-4546-4db5-809d-435823288bd8"
Dec 11 10:18:47 crc kubenswrapper[4663]: I1211 10:18:47.598260 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:47 crc kubenswrapper[4663]: I1211 10:18:47.598294 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:47 crc kubenswrapper[4663]: I1211 10:18:47.598302 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:47 crc kubenswrapper[4663]: I1211 10:18:47.598314 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:47 crc kubenswrapper[4663]: I1211 10:18:47.598323 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:47Z","lastTransitionTime":"2025-12-11T10:18:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:47 crc kubenswrapper[4663]: I1211 10:18:47.700001 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:47 crc kubenswrapper[4663]: I1211 10:18:47.700028 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:47 crc kubenswrapper[4663]: I1211 10:18:47.700036 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:47 crc kubenswrapper[4663]: I1211 10:18:47.700047 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:47 crc kubenswrapper[4663]: I1211 10:18:47.700058 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:47Z","lastTransitionTime":"2025-12-11T10:18:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:47 crc kubenswrapper[4663]: I1211 10:18:47.802337 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:47 crc kubenswrapper[4663]: I1211 10:18:47.802553 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:47 crc kubenswrapper[4663]: I1211 10:18:47.802565 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:47 crc kubenswrapper[4663]: I1211 10:18:47.802580 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:47 crc kubenswrapper[4663]: I1211 10:18:47.802591 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:47Z","lastTransitionTime":"2025-12-11T10:18:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:47 crc kubenswrapper[4663]: I1211 10:18:47.904169 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:47 crc kubenswrapper[4663]: I1211 10:18:47.904221 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:47 crc kubenswrapper[4663]: I1211 10:18:47.904234 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:47 crc kubenswrapper[4663]: I1211 10:18:47.904247 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:47 crc kubenswrapper[4663]: I1211 10:18:47.904257 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:47Z","lastTransitionTime":"2025-12-11T10:18:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:48 crc kubenswrapper[4663]: I1211 10:18:48.006710 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:48 crc kubenswrapper[4663]: I1211 10:18:48.006745 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:48 crc kubenswrapper[4663]: I1211 10:18:48.006754 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:48 crc kubenswrapper[4663]: I1211 10:18:48.006768 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:48 crc kubenswrapper[4663]: I1211 10:18:48.006778 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:48Z","lastTransitionTime":"2025-12-11T10:18:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:48 crc kubenswrapper[4663]: I1211 10:18:48.111935 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:48 crc kubenswrapper[4663]: I1211 10:18:48.111984 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:48 crc kubenswrapper[4663]: I1211 10:18:48.111993 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:48 crc kubenswrapper[4663]: I1211 10:18:48.112006 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:48 crc kubenswrapper[4663]: I1211 10:18:48.112017 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:48Z","lastTransitionTime":"2025-12-11T10:18:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:48 crc kubenswrapper[4663]: I1211 10:18:48.214965 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:48 crc kubenswrapper[4663]: I1211 10:18:48.215008 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:48 crc kubenswrapper[4663]: I1211 10:18:48.215018 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:48 crc kubenswrapper[4663]: I1211 10:18:48.215033 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:48 crc kubenswrapper[4663]: I1211 10:18:48.215043 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:48Z","lastTransitionTime":"2025-12-11T10:18:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:48 crc kubenswrapper[4663]: I1211 10:18:48.316921 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:48 crc kubenswrapper[4663]: I1211 10:18:48.316950 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:48 crc kubenswrapper[4663]: I1211 10:18:48.316959 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:48 crc kubenswrapper[4663]: I1211 10:18:48.316970 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:48 crc kubenswrapper[4663]: I1211 10:18:48.316977 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:48Z","lastTransitionTime":"2025-12-11T10:18:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:48 crc kubenswrapper[4663]: I1211 10:18:48.418432 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:48 crc kubenswrapper[4663]: I1211 10:18:48.418479 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:48 crc kubenswrapper[4663]: I1211 10:18:48.418488 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:48 crc kubenswrapper[4663]: I1211 10:18:48.418507 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:48 crc kubenswrapper[4663]: I1211 10:18:48.418518 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:48Z","lastTransitionTime":"2025-12-11T10:18:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:48 crc kubenswrapper[4663]: I1211 10:18:48.504485 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 11 10:18:48 crc kubenswrapper[4663]: I1211 10:18:48.504606 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 11 10:18:48 crc kubenswrapper[4663]: E1211 10:18:48.504647 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 11 10:18:48 crc kubenswrapper[4663]: E1211 10:18:48.504817 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 10:18:48 crc kubenswrapper[4663]: I1211 10:18:48.520914 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:48 crc kubenswrapper[4663]: I1211 10:18:48.520959 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:48 crc kubenswrapper[4663]: I1211 10:18:48.520973 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:48 crc kubenswrapper[4663]: I1211 10:18:48.520994 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:48 crc kubenswrapper[4663]: I1211 10:18:48.521007 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:48Z","lastTransitionTime":"2025-12-11T10:18:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:48 crc kubenswrapper[4663]: I1211 10:18:48.623300 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:48 crc kubenswrapper[4663]: I1211 10:18:48.623339 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:48 crc kubenswrapper[4663]: I1211 10:18:48.623349 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:48 crc kubenswrapper[4663]: I1211 10:18:48.623365 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:48 crc kubenswrapper[4663]: I1211 10:18:48.623380 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:48Z","lastTransitionTime":"2025-12-11T10:18:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:48 crc kubenswrapper[4663]: I1211 10:18:48.725503 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:48 crc kubenswrapper[4663]: I1211 10:18:48.725560 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:48 crc kubenswrapper[4663]: I1211 10:18:48.725576 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:48 crc kubenswrapper[4663]: I1211 10:18:48.725593 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:48 crc kubenswrapper[4663]: I1211 10:18:48.725604 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:48Z","lastTransitionTime":"2025-12-11T10:18:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:48 crc kubenswrapper[4663]: I1211 10:18:48.828211 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:48 crc kubenswrapper[4663]: I1211 10:18:48.828264 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:48 crc kubenswrapper[4663]: I1211 10:18:48.828274 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:48 crc kubenswrapper[4663]: I1211 10:18:48.828296 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:48 crc kubenswrapper[4663]: I1211 10:18:48.828306 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:48Z","lastTransitionTime":"2025-12-11T10:18:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:48 crc kubenswrapper[4663]: I1211 10:18:48.930721 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:48 crc kubenswrapper[4663]: I1211 10:18:48.930754 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:48 crc kubenswrapper[4663]: I1211 10:18:48.930782 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:48 crc kubenswrapper[4663]: I1211 10:18:48.930794 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:48 crc kubenswrapper[4663]: I1211 10:18:48.930803 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:48Z","lastTransitionTime":"2025-12-11T10:18:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.033406 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.033440 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.033449 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.033461 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.033470 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:49Z","lastTransitionTime":"2025-12-11T10:18:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.135705 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.135754 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.135764 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.135775 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.135785 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:49Z","lastTransitionTime":"2025-12-11T10:18:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.237717 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.237767 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.237776 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.237788 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.237799 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:49Z","lastTransitionTime":"2025-12-11T10:18:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.340413 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.340451 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.340460 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.340481 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.340493 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:49Z","lastTransitionTime":"2025-12-11T10:18:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.442684 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.442722 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.442730 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.442744 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.442754 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:49Z","lastTransitionTime":"2025-12-11T10:18:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.503652 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.503664 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:18:49 crc kubenswrapper[4663]: E1211 10:18:49.503788 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 10:18:49 crc kubenswrapper[4663]: E1211 10:18:49.503892 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5r5q6" podUID="54c64914-8d10-48bc-a755-1f7257400a32" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.514437 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8746ac1c-332c-475c-bf39-1c4dbd584637\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dbc58fcfdfe62d2170360ef7d64580a71e7d25d21800257757b80a2bccff9a5e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d29ef8426056a5691774eaa72cd716c08a424df3a0b426e1d7897d839230622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://625fddf0941d4a6bb39e6e05fa5888872b1e22f59fde1ffcbcb9990def6a4194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://977a2d3f94cdea486681aa43acfca631266808e41aeb1af091aa92400821ed37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://977a2d3f94cdea486681aa43acfca631266808e41aeb1af091aa92400821ed37\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:49Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.525456 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b934597e-ac43-4d21-82b1-c8d4260800e6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba7f98a60c12531c6766887ffac909316494552cc1acb036c5609e6400ac5b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d9bf7e6f64790700d3765c1935fa7eb13fa839a96617117bb226a8b86090fe9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83cef2539e49e0251b7cdab06b7a9002344f7ee41150f84b3f648372b4d50b55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a89c5b395c04b94c4476db7bf1edcfe175fb552dbbf4145a7031c0f86e139e9f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0068e2981a8c781e9563c1714bf0be7f35b5510f1fd7e52ab4cc83a0c0ec0c0e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"le observer\\\\nW1211 10:18:06.233120 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1211 10:18:06.233258 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 10:18:06.234798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2924789290/tls.crt::/tmp/serving-cert-2924789290/tls.key\\\\\\\"\\\\nI1211 10:18:06.570830 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 10:18:06.573023 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 10:18:06.573049 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 10:18:06.573076 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 10:18:06.573081 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 10:18:06.580393 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1211 10:18:06.580415 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580420 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580424 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 10:18:06.580426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 10:18:06.580429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 10:18:06.580432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1211 10:18:06.580522 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1211 10:18:06.581348 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1a0dcfabc032353060f8cb94fc8c601da6b20a3e2c35b84229bf66c9256537\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:49Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.536549 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9e980e59ce0e172d37122fdae971fb0123940795827553f646ae1a6283f523\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:49Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.545296 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.545327 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.545337 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.545351 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.545360 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:49Z","lastTransitionTime":"2025-12-11T10:18:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.551102 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f739f0f-4546-4db5-809d-435823288bd8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84fd59d33b64d1211bfe2754377534ed00ca078356eeb674fcd5d2d25761500b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec884ca4a06220486834fb7dad1a97dae6026a74e1f4fa2c57ce504fd3f79a52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec884ca4a06220486834fb7dad1a97dae6026a74e1f4fa2c57ce504fd3f79a52\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T10:18:31Z\\\",\\\"message\\\":\\\"11 10:18:31.160898 6304 obj_retry.go:303] Retry object setup: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6\\\\nI1211 10:18:31.160908 6304 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6\\\\nF1211 10:18:31.160914 6304 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:31Z is after 2025-08-24T17:21:41Z]\\\\nI1211 10:18:31.160923 6304 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1211 10:18:31.160928 6304 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-z7882_openshift-ovn-kubernetes(4f739f0f-4546-4db5-809d-435823288bd8)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-z7882\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:49Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.559855 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11443ad0-297e-48c9-9c1e-3771584468ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c487fbed4984533659f3e1f3a79783d12b4e95706a529dc9cd24abb62adda34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-997ts
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d93647b4b0d3f4391ca6b565072c59f3f32be1433b5a043e3bb93395d529ebc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-997ts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:19Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hbks6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:49Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.570260 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:49Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.584659 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8f5qw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7cf8637-c767-47eb-8d38-578340067566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3440496709e41014f785c5aadeba1bae72c4b733779039f7508fa2ca78144537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-646rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8f5qw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:49Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.595106 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98941020-2764-490d-81db-9decd21aaa13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a72fbeafe047b24c15eb96a00eacf7ff119486159cd9cf32f63af409aecc529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed2238a7f3178a2c325e0f77a94934af6c907c13d0340e7f4c67b744669b56b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mq524\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:49Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.605323 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-qv7jl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb85226c32cc03f4667a27f4596ce501400b323c88b347baab7574fe8f9e056c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/ser
viceaccount\\\",\\\"name\\\":\\\"kube-api-access-4c2nm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-qv7jl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:49Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.613347 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5r5q6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"54c64914-8d10-48bc-a755-1f7257400a32\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8wsjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8wsjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:20Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5r5q6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:49Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.622494 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dc8119f8882cfe2688a3b38b4746f6f9f75764080053fae1f19edd7880464e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de08980167134ace33e7d6a626c4ee66e733e247e6e8d0f7e969105c12f41580\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:49Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.631595 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8fc3491f01b999c50e166f30aef0a1c9d95bc6935a4b68cdac9bfbb70bce980\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:49Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.639654 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:49Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.647539 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.647574 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.647583 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.647596 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.647605 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:49Z","lastTransitionTime":"2025-12-11T10:18:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.649154 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:49Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.659758 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4g9zj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27da1f1a-7a92-4c81-9889-21b44ea62db0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcf1553f9e95b87867fdf90f869863968036e47ec536a526cae63020cc9119fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef7bb22e51b47e9e49687153801d9a61be88291a5c4ae94f0d6c779c1eb779c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef7bb22e51b47e9e49687153801d9a61be88291a5c4ae94f0d6c779c1eb779c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28edb9dba08d7e8cccab85d649f75ce2c946ff8be7bb2de39e02432156e2ccf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28edb9dba08d7e8cccab85d649f75ce2c946ff8be7bb2de39e02432156e2ccf8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4g9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:49Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.668150 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8lh4c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0724e8ea-646f-422e-bc21-39189bddba17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14a19c708bdceab221701a9e228b35336aa133a021ee7a088800e9bec694e175\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4cn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8lh4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:49Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.677487 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"82107b1a-b827-4c4a-b133-ce7e8ed73963\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf6437a10b018ecd7f7c1a23eb664377818d86ae958710d078a245878cd7d5a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52c5a9fef9792d964d1b4505a0e5857652454e088fbc8293524f644afd044507\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed406469f2fa49f8100c4c2c4adf80a173b1e5c8ab8581a4dc8c4d93fd557c7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b28b0a79fd6ef3045b5417a53b9e665faea306044a08b64362f5f573221dea8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:49Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.750088 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.750209 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.750223 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.750243 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.750256 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:49Z","lastTransitionTime":"2025-12-11T10:18:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.852077 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.852133 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.852144 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.852158 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.852168 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:49Z","lastTransitionTime":"2025-12-11T10:18:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.954762 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.954802 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.954810 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.954825 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:49 crc kubenswrapper[4663]: I1211 10:18:49.954835 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:49Z","lastTransitionTime":"2025-12-11T10:18:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.056935 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.056992 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.057002 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.057018 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.057030 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:50Z","lastTransitionTime":"2025-12-11T10:18:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.158723 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.158766 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.158774 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.158789 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.158800 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:50Z","lastTransitionTime":"2025-12-11T10:18:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.261677 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.261723 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.261733 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.261749 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.261761 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:50Z","lastTransitionTime":"2025-12-11T10:18:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.363806 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.363872 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.363885 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.363900 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.363909 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:50Z","lastTransitionTime":"2025-12-11T10:18:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.466080 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.466174 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.466212 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.466231 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.466243 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:50Z","lastTransitionTime":"2025-12-11T10:18:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.504459 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.504503 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:18:50 crc kubenswrapper[4663]: E1211 10:18:50.504571 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 10:18:50 crc kubenswrapper[4663]: E1211 10:18:50.504655 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.568150 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.568203 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.568212 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.568224 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.568232 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:50Z","lastTransitionTime":"2025-12-11T10:18:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.671017 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.671061 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.671070 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.671083 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.671094 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:50Z","lastTransitionTime":"2025-12-11T10:18:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.723306 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.723345 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.723355 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.723371 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.723381 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:50Z","lastTransitionTime":"2025-12-11T10:18:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:18:50 crc kubenswrapper[4663]: E1211 10:18:50.733580 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a09a2675-5943-46f1-b4c2-67ff05e820a6\\\",\\\"systemUUID\\\":\\\"18e040fd-bd85-4c98-8f53-60800a512169\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:50Z is after 2025-08-24T17:21:41Z"
Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.736862 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.736889 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
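The status patch above is not rejected by the API server's own validation; it dies one hop earlier, because the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 presents a certificate that expired on 2025-08-24T17:21:41Z. A minimal sketch of the same validity comparison, assuming the third-party cryptography package (version 42+ for the *_utc accessors); host, port and dates are taken from the log:

    #!/usr/bin/env python3
    # Fetch the webhook's certificate WITHOUT verification (verification is
    # exactly what fails once it has expired) and repeat the x509 time check.
    import datetime
    import ssl
    from cryptography import x509

    HOST, PORT = "127.0.0.1", 9743  # webhook endpoint from the log

    pem = ssl.get_server_certificate((HOST, PORT))  # uses CERT_NONE when no CA bundle is given
    cert = x509.load_pem_x509_certificate(pem.encode())

    now = datetime.datetime.now(datetime.timezone.utc)
    if now > cert.not_valid_after_utc:
        print(f"expired: current time {now:%Y-%m-%dT%H:%M:%SZ} is after "
              f"{cert.not_valid_after_utc:%Y-%m-%dT%H:%M:%SZ}")
    else:
        print("valid until", cert.not_valid_after_utc)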
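For reference, the rejected payload also shows where the node's headroom goes: allocatable = capacity - reserved (kube-reserved, system-reserved and eviction headroom). A worked example with the figures copied from the patch above (ephemeral-storage is omitted because the payload reports its capacity in Ki but its allocatable in plain bytes):

    # Figures copied from the "capacity"/"allocatable" fields of the patch.
    capacity_cpu_m = 12 * 1000     # "cpu":"12"
    allocatable_cpu_m = 11800      # "cpu":"11800m"
    capacity_mem_ki = 32865352     # "memory":"32865352Ki"
    allocatable_mem_ki = 32404552  # "memory":"32404552Ki"

    print(f"reserved CPU: {capacity_cpu_m - allocatable_cpu_m}m")  # 200m
    mem_ki = capacity_mem_ki - allocatable_mem_ki
    print(f"reserved memory: {mem_ki}Ki ({mem_ki // 1024}Mi)")     # 460800Ki (450Mi)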
event="NodeHasNoDiskPressure" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.736897 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.736908 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.736917 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:50Z","lastTransitionTime":"2025-12-11T10:18:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:50 crc kubenswrapper[4663]: E1211 10:18:50.745932 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a09a2675-5943-46f1-b4c2-67ff05e820a6\\\",\\\"systemUUID\\\":\\\"18e040fd-bd85-4c98-8f53-60800a512169\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:50Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.748888 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.748939 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.748948 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.748960 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.748967 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:50Z","lastTransitionTime":"2025-12-11T10:18:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:50 crc kubenswrapper[4663]: E1211 10:18:50.758399 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a09a2675-5943-46f1-b4c2-67ff05e820a6\\\",\\\"systemUUID\\\":\\\"18e040fd-bd85-4c98-8f53-60800a512169\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:50Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.760761 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.760799 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.760809 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.760823 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.760832 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:50Z","lastTransitionTime":"2025-12-11T10:18:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:50 crc kubenswrapper[4663]: E1211 10:18:50.769052 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a09a2675-5943-46f1-b4c2-67ff05e820a6\\\",\\\"systemUUID\\\":\\\"18e040fd-bd85-4c98-8f53-60800a512169\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:50Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.771570 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.771600 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.771611 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.771621 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.771629 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:50Z","lastTransitionTime":"2025-12-11T10:18:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:50 crc kubenswrapper[4663]: E1211 10:18:50.779422 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:18:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a09a2675-5943-46f1-b4c2-67ff05e820a6\\\",\\\"systemUUID\\\":\\\"18e040fd-bd85-4c98-8f53-60800a512169\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:50Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:50 crc kubenswrapper[4663]: E1211 10:18:50.779570 4663 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.780626 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.780661 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.780674 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.780685 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.780695 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:50Z","lastTransitionTime":"2025-12-11T10:18:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.883236 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.883268 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.883277 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.883292 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.883303 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:50Z","lastTransitionTime":"2025-12-11T10:18:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.985284 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.985327 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.985337 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.985351 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:50 crc kubenswrapper[4663]: I1211 10:18:50.985361 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:50Z","lastTransitionTime":"2025-12-11T10:18:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:51 crc kubenswrapper[4663]: I1211 10:18:51.087730 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:51 crc kubenswrapper[4663]: I1211 10:18:51.087767 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:51 crc kubenswrapper[4663]: I1211 10:18:51.087775 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:51 crc kubenswrapper[4663]: I1211 10:18:51.087791 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:51 crc kubenswrapper[4663]: I1211 10:18:51.087801 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:51Z","lastTransitionTime":"2025-12-11T10:18:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:51 crc kubenswrapper[4663]: I1211 10:18:51.189687 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:51 crc kubenswrapper[4663]: I1211 10:18:51.189716 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:51 crc kubenswrapper[4663]: I1211 10:18:51.189724 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:51 crc kubenswrapper[4663]: I1211 10:18:51.189736 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:51 crc kubenswrapper[4663]: I1211 10:18:51.189744 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:51Z","lastTransitionTime":"2025-12-11T10:18:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:51 crc kubenswrapper[4663]: I1211 10:18:51.291961 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:51 crc kubenswrapper[4663]: I1211 10:18:51.291986 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:51 crc kubenswrapper[4663]: I1211 10:18:51.291994 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:51 crc kubenswrapper[4663]: I1211 10:18:51.292005 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:51 crc kubenswrapper[4663]: I1211 10:18:51.292011 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:51Z","lastTransitionTime":"2025-12-11T10:18:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:51 crc kubenswrapper[4663]: I1211 10:18:51.393884 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:51 crc kubenswrapper[4663]: I1211 10:18:51.393917 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:51 crc kubenswrapper[4663]: I1211 10:18:51.393927 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:51 crc kubenswrapper[4663]: I1211 10:18:51.393942 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:51 crc kubenswrapper[4663]: I1211 10:18:51.393950 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:51Z","lastTransitionTime":"2025-12-11T10:18:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:51 crc kubenswrapper[4663]: I1211 10:18:51.496424 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:51 crc kubenswrapper[4663]: I1211 10:18:51.496494 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:51 crc kubenswrapper[4663]: I1211 10:18:51.496508 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:51 crc kubenswrapper[4663]: I1211 10:18:51.496532 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:51 crc kubenswrapper[4663]: I1211 10:18:51.496548 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:51Z","lastTransitionTime":"2025-12-11T10:18:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:51 crc kubenswrapper[4663]: I1211 10:18:51.503678 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:18:51 crc kubenswrapper[4663]: I1211 10:18:51.503734 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:18:51 crc kubenswrapper[4663]: E1211 10:18:51.503807 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 10:18:51 crc kubenswrapper[4663]: E1211 10:18:51.503902 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5r5q6" podUID="54c64914-8d10-48bc-a755-1f7257400a32" Dec 11 10:18:51 crc kubenswrapper[4663]: I1211 10:18:51.598650 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:51 crc kubenswrapper[4663]: I1211 10:18:51.598688 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:51 crc kubenswrapper[4663]: I1211 10:18:51.598700 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:51 crc kubenswrapper[4663]: I1211 10:18:51.598717 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:51 crc kubenswrapper[4663]: I1211 10:18:51.598730 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:51Z","lastTransitionTime":"2025-12-11T10:18:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:51 crc kubenswrapper[4663]: I1211 10:18:51.701036 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:51 crc kubenswrapper[4663]: I1211 10:18:51.701084 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:51 crc kubenswrapper[4663]: I1211 10:18:51.701093 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:51 crc kubenswrapper[4663]: I1211 10:18:51.701112 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:51 crc kubenswrapper[4663]: I1211 10:18:51.701123 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:51Z","lastTransitionTime":"2025-12-11T10:18:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:51 crc kubenswrapper[4663]: I1211 10:18:51.803209 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:51 crc kubenswrapper[4663]: I1211 10:18:51.803248 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:51 crc kubenswrapper[4663]: I1211 10:18:51.803256 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:51 crc kubenswrapper[4663]: I1211 10:18:51.803271 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:51 crc kubenswrapper[4663]: I1211 10:18:51.803281 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:51Z","lastTransitionTime":"2025-12-11T10:18:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:51 crc kubenswrapper[4663]: I1211 10:18:51.905882 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:51 crc kubenswrapper[4663]: I1211 10:18:51.905926 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:51 crc kubenswrapper[4663]: I1211 10:18:51.905938 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:51 crc kubenswrapper[4663]: I1211 10:18:51.905952 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:51 crc kubenswrapper[4663]: I1211 10:18:51.905962 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:51Z","lastTransitionTime":"2025-12-11T10:18:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:52 crc kubenswrapper[4663]: I1211 10:18:52.008348 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:52 crc kubenswrapper[4663]: I1211 10:18:52.008391 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:52 crc kubenswrapper[4663]: I1211 10:18:52.008399 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:52 crc kubenswrapper[4663]: I1211 10:18:52.008411 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:52 crc kubenswrapper[4663]: I1211 10:18:52.008433 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:52Z","lastTransitionTime":"2025-12-11T10:18:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:52 crc kubenswrapper[4663]: I1211 10:18:52.110786 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:52 crc kubenswrapper[4663]: I1211 10:18:52.110854 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:52 crc kubenswrapper[4663]: I1211 10:18:52.110868 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:52 crc kubenswrapper[4663]: I1211 10:18:52.110892 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:52 crc kubenswrapper[4663]: I1211 10:18:52.110911 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:52Z","lastTransitionTime":"2025-12-11T10:18:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:52 crc kubenswrapper[4663]: I1211 10:18:52.213465 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:52 crc kubenswrapper[4663]: I1211 10:18:52.213525 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:52 crc kubenswrapper[4663]: I1211 10:18:52.213535 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:52 crc kubenswrapper[4663]: I1211 10:18:52.213551 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:52 crc kubenswrapper[4663]: I1211 10:18:52.213570 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:52Z","lastTransitionTime":"2025-12-11T10:18:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:52 crc kubenswrapper[4663]: I1211 10:18:52.314700 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/54c64914-8d10-48bc-a755-1f7257400a32-metrics-certs\") pod \"network-metrics-daemon-5r5q6\" (UID: \"54c64914-8d10-48bc-a755-1f7257400a32\") " pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:18:52 crc kubenswrapper[4663]: E1211 10:18:52.314838 4663 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 11 10:18:52 crc kubenswrapper[4663]: E1211 10:18:52.314889 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/54c64914-8d10-48bc-a755-1f7257400a32-metrics-certs podName:54c64914-8d10-48bc-a755-1f7257400a32 nodeName:}" failed. No retries permitted until 2025-12-11 10:19:24.314876236 +0000 UTC m=+95.132806020 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/54c64914-8d10-48bc-a755-1f7257400a32-metrics-certs") pod "network-metrics-daemon-5r5q6" (UID: "54c64914-8d10-48bc-a755-1f7257400a32") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 11 10:18:52 crc kubenswrapper[4663]: I1211 10:18:52.315951 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:52 crc kubenswrapper[4663]: I1211 10:18:52.315979 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:52 crc kubenswrapper[4663]: I1211 10:18:52.315990 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:52 crc kubenswrapper[4663]: I1211 10:18:52.316003 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:52 crc kubenswrapper[4663]: I1211 10:18:52.316013 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:52Z","lastTransitionTime":"2025-12-11T10:18:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:52 crc kubenswrapper[4663]: I1211 10:18:52.422176 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:52 crc kubenswrapper[4663]: I1211 10:18:52.422270 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:52 crc kubenswrapper[4663]: I1211 10:18:52.422286 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:52 crc kubenswrapper[4663]: I1211 10:18:52.422312 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:52 crc kubenswrapper[4663]: I1211 10:18:52.422331 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:52Z","lastTransitionTime":"2025-12-11T10:18:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:52 crc kubenswrapper[4663]: I1211 10:18:52.504145 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:18:52 crc kubenswrapper[4663]: I1211 10:18:52.504236 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:18:52 crc kubenswrapper[4663]: E1211 10:18:52.504333 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 10:18:52 crc kubenswrapper[4663]: E1211 10:18:52.504417 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 10:18:52 crc kubenswrapper[4663]: I1211 10:18:52.524904 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:52 crc kubenswrapper[4663]: I1211 10:18:52.524934 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:52 crc kubenswrapper[4663]: I1211 10:18:52.524946 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:52 crc kubenswrapper[4663]: I1211 10:18:52.524967 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:52 crc kubenswrapper[4663]: I1211 10:18:52.524981 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:52Z","lastTransitionTime":"2025-12-11T10:18:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:52 crc kubenswrapper[4663]: I1211 10:18:52.627559 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:52 crc kubenswrapper[4663]: I1211 10:18:52.627585 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:52 crc kubenswrapper[4663]: I1211 10:18:52.627593 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:52 crc kubenswrapper[4663]: I1211 10:18:52.627608 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:52 crc kubenswrapper[4663]: I1211 10:18:52.627619 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:52Z","lastTransitionTime":"2025-12-11T10:18:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:52 crc kubenswrapper[4663]: I1211 10:18:52.729768 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:52 crc kubenswrapper[4663]: I1211 10:18:52.729811 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:52 crc kubenswrapper[4663]: I1211 10:18:52.729820 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:52 crc kubenswrapper[4663]: I1211 10:18:52.729855 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:52 crc kubenswrapper[4663]: I1211 10:18:52.729865 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:52Z","lastTransitionTime":"2025-12-11T10:18:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:52 crc kubenswrapper[4663]: I1211 10:18:52.832330 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:52 crc kubenswrapper[4663]: I1211 10:18:52.832546 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:52 crc kubenswrapper[4663]: I1211 10:18:52.832611 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:52 crc kubenswrapper[4663]: I1211 10:18:52.832685 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:52 crc kubenswrapper[4663]: I1211 10:18:52.832737 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:52Z","lastTransitionTime":"2025-12-11T10:18:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:52 crc kubenswrapper[4663]: I1211 10:18:52.935221 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:52 crc kubenswrapper[4663]: I1211 10:18:52.935292 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:52 crc kubenswrapper[4663]: I1211 10:18:52.935305 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:52 crc kubenswrapper[4663]: I1211 10:18:52.935324 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:52 crc kubenswrapper[4663]: I1211 10:18:52.935344 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:52Z","lastTransitionTime":"2025-12-11T10:18:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.037590 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.037644 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.037658 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.037682 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.037695 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:53Z","lastTransitionTime":"2025-12-11T10:18:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.141990 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.142023 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.142033 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.142046 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.142053 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:53Z","lastTransitionTime":"2025-12-11T10:18:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.244450 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.244500 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.244509 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.244525 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.244539 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:53Z","lastTransitionTime":"2025-12-11T10:18:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.346699 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.346735 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.346743 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.346757 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.346767 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:53Z","lastTransitionTime":"2025-12-11T10:18:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.449176 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.449247 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.449260 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.449280 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.449289 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:53Z","lastTransitionTime":"2025-12-11T10:18:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.503962 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.504065 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:18:53 crc kubenswrapper[4663]: E1211 10:18:53.504128 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 10:18:53 crc kubenswrapper[4663]: E1211 10:18:53.504307 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5r5q6" podUID="54c64914-8d10-48bc-a755-1f7257400a32" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.551788 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.551834 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.551848 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.551868 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.551880 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:53Z","lastTransitionTime":"2025-12-11T10:18:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.654641 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.654677 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.654686 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.654700 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.654711 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:53Z","lastTransitionTime":"2025-12-11T10:18:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.756515 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.756569 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.756580 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.756593 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.756602 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:53Z","lastTransitionTime":"2025-12-11T10:18:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.806321 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-qv7jl_43cbb02f-aa8f-41aa-bd0a-8042aca3002c/kube-multus/0.log" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.806409 4663 generic.go:334] "Generic (PLEG): container finished" podID="43cbb02f-aa8f-41aa-bd0a-8042aca3002c" containerID="fb85226c32cc03f4667a27f4596ce501400b323c88b347baab7574fe8f9e056c" exitCode=1 Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.806451 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-qv7jl" event={"ID":"43cbb02f-aa8f-41aa-bd0a-8042aca3002c","Type":"ContainerDied","Data":"fb85226c32cc03f4667a27f4596ce501400b323c88b347baab7574fe8f9e056c"} Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.807012 4663 scope.go:117] "RemoveContainer" containerID="fb85226c32cc03f4667a27f4596ce501400b323c88b347baab7574fe8f9e056c" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.819219 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5r5q6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"54c64914-8d10-48bc-a755-1f7257400a32\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8wsjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8wsjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:20Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5r5q6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:53Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.829663 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dc8119f8882cfe2688a3b38b4746f6f9f75764080053fae1f19edd7880464e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de08980167134ace33e7d6a626c4ee66e733e247e6e8d0f7e969105c12f41580\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:53Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.839457 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:53Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.847946 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8f5qw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7cf8637-c767-47eb-8d38-578340067566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3440496709e41014f785c5aadeba1bae72c4b733779039f7508fa2ca78144537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-646rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8f5qw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:53Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.857880 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"98941020-2764-490d-81db-9decd21aaa13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a72fbeafe047b24c15eb96a00eacf7ff119486159cd9cf32f63af409aecc529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed2238a7f3178a2c325e0f77a94934af6c907c13d0340e7f4c67b744669b56b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mq524\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:53Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.859708 4663 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.859752 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.859763 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.859782 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.859792 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:53Z","lastTransitionTime":"2025-12-11T10:18:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.867022 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-qv7jl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb85226c32cc03f4667a27f4596ce501400b323c88b347baab7574fe8f9e056c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb85226c32cc03f4667a27f4596ce501400b323c88b347baab7574fe8f9e056c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T10:18:53Z\\\",\\\"message\\\":\\\"2025-12-11T10:18:08+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_5664ca76-8f3b-4c41-a819-a70f2bce86e2\\\\n2025-12-11T10:18:08+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_5664ca76-8f3b-4c41-a819-a70f2bce86e2 to /host/opt/cni/bin/\\\\n2025-12-11T10:18:08Z [verbose] 
multus-daemon started\\\\n2025-12-11T10:18:08Z [verbose] Readiness Indicator file check\\\\n2025-12-11T10:18:53Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4c2nm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-qv7jl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:53Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.874220 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8lh4c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0724e8ea-646f-422e-bc21-39189bddba17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14a19c708bdceab221701a9e228b35336aa133a021ee7a088800e9bec694e175\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4cn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8lh4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:53Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.885502 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"82107b1a-b827-4c4a-b133-ce7e8ed73963\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf6437a10b018ecd7f7c1a23eb664377818d86ae958710d078a245878cd7d5a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52c5a9fef9792d964d1b4505a0e5857652454e088fbc8293524f644afd044507\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed406469f2fa49f8100c4c2c4adf80a173b1e5c8ab8581a4dc8c4d93fd557c7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b28b0a79fd6ef3045b5417a53b9e665faea306044a08b64362f5f573221dea8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:53Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.896084 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8fc3491f01b999c50e166f30aef0a1c9d95bc6935a4b68cdac9bfbb70bce980\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:53Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.904431 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:53Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.913491 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:53Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.923256 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4g9zj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27da1f1a-7a92-4c81-9889-21b44ea62db0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcf1553f9e95b87867fdf90f869863968036e47ec536a526cae63020cc9119fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"container
ID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMo
unts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef7bb22e51b47e9e49687153801d9a61be88291a5c4ae94f0d6c779c1eb779c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef7bb22e51b47e9e49687153801d9a61be88291a5c4ae94f0d6c779c1eb779c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28edb9dba08d7e8cccab85d649f75ce2c946ff8be7bb2de39e02432156e2ccf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true
,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28edb9dba08d7e8cccab85d649f75ce2c946ff8be7bb2de39e02432156e2ccf8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4g9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:53Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.931779 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8746ac1c-332c-475c-bf39-1c4dbd584637\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dbc58fcfdfe62d2170360ef7d64580a71e7d25d21800257757b80a2bccff9a5e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d29ef8426056a5691774eaa72cd716c08a424df3a0b426e1d7897d839230622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\
":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://625fddf0941d4a6bb39e6e05fa5888872b1e22f59fde1ffcbcb9990def6a4194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://977a2d3f94cdea486681aa43acfca631266808e41aeb1af091aa92400821ed37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://977a2d3f94cdea486681aa43acfca631266808e41aeb1af091aa92400821ed37\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:53Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.940903 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b934597e-ac43-4d21-82b1-c8d4260800e6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba7f98a60c12531c6766887ffac909316494552cc1acb036c5609e6400ac5b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d9bf7e6f64790700d3765c1935fa7eb13fa839a96617117bb226a8b86090fe9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83cef2539e49e0251b7cdab06b7a9002344f7ee41150f84b3f648372b4d50b55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a89c5b395c04b94c4476db7bf1edcfe175fb552dbbf4145a7031c0f86e139e9f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0068e2981a8c781e9563c1714bf0be7f35b5510f1fd7e52ab4cc83a0c0ec0c0e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"le observer\\\\nW1211 10:18:06.233120 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1211 10:18:06.233258 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 10:18:06.234798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2924789290/tls.crt::/tmp/serving-cert-2924789290/tls.key\\\\\\\"\\\\nI1211 10:18:06.570830 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 10:18:06.573023 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 10:18:06.573049 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 10:18:06.573076 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 10:18:06.573081 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 10:18:06.580393 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1211 10:18:06.580415 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580420 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580424 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 10:18:06.580426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 10:18:06.580429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 10:18:06.580432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1211 10:18:06.580522 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1211 10:18:06.581348 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1a0dcfabc032353060f8cb94fc8c601da6b20a3e2c35b84229bf66c9256537\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:53Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.948540 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9e980e59ce0e172d37122fdae971fb0123940795827553f646ae1a6283f523\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:53Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.961668 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f739f0f-4546-4db5-809d-435823288bd8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84fd59d33b64d1211bfe2754377534ed00ca078356eeb674fcd5d2d25761500b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec884ca4a06220486834fb7dad1a97dae6026a74
e1f4fa2c57ce504fd3f79a52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec884ca4a06220486834fb7dad1a97dae6026a74e1f4fa2c57ce504fd3f79a52\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T10:18:31Z\\\",\\\"message\\\":\\\"11 10:18:31.160898 6304 obj_retry.go:303] Retry object setup: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6\\\\nI1211 10:18:31.160908 6304 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6\\\\nF1211 10:18:31.160914 6304 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:31Z is after 2025-08-24T17:21:41Z]\\\\nI1211 10:18:31.160923 6304 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1211 10:18:31.160928 6304 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-z7882_openshift-ovn-kubernetes(4f739f0f-4546-4db5-809d-435823288bd8)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-z7882\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:53Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.967126 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.967172 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.967209 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.967237 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.967248 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:53Z","lastTransitionTime":"2025-12-11T10:18:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:53 crc kubenswrapper[4663]: I1211 10:18:53.983507 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11443ad0-297e-48c9-9c1e-3771584468ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c487fbed4984533659f3e1f3a79783d12b4e95706a529dc9cd24abb62adda34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-997ts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d93647b4b0d3f4391ca6b565072c59f3f32be1433b5a043e3bb93395d529ebc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-997ts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:19Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hbks6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:53Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.069742 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.069777 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.069788 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.069803 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.069813 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:54Z","lastTransitionTime":"2025-12-11T10:18:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.172806 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.172877 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.172890 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.172910 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.172922 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:54Z","lastTransitionTime":"2025-12-11T10:18:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.275813 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.275873 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.275887 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.275905 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.275919 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:54Z","lastTransitionTime":"2025-12-11T10:18:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.378834 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.378894 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.378907 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.378926 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.378937 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:54Z","lastTransitionTime":"2025-12-11T10:18:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.482751 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.482825 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.482841 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.482912 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.482930 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:54Z","lastTransitionTime":"2025-12-11T10:18:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.504250 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.504327 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:18:54 crc kubenswrapper[4663]: E1211 10:18:54.504395 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 10:18:54 crc kubenswrapper[4663]: E1211 10:18:54.504505 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.586043 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.586088 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.586099 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.586117 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.586133 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:54Z","lastTransitionTime":"2025-12-11T10:18:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.689687 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.689730 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.689740 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.689754 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.689765 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:54Z","lastTransitionTime":"2025-12-11T10:18:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.792552 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.792609 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.792618 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.792641 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.792654 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:54Z","lastTransitionTime":"2025-12-11T10:18:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.811605 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-qv7jl_43cbb02f-aa8f-41aa-bd0a-8042aca3002c/kube-multus/0.log" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.811668 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-qv7jl" event={"ID":"43cbb02f-aa8f-41aa-bd0a-8042aca3002c","Type":"ContainerStarted","Data":"60bc0fe12622361f71172b9c28087be13fe7c722952d22af2982c439d19c7a73"} Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.826871 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8746ac1c-332c-475c-bf39-1c4dbd584637\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dbc58fcfdfe62d2170360ef7d64580a71e7d25d21800257757b80a2bccff9a5e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d29ef8426056a5691774eaa72cd716c08a424df3a0b426e1d7897d839230622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://625fddf0941d4a6bb39e6e05fa5888872b1e22f59fde1ffcbcb9990def6a4194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://977a2d3f94cdea486681aa43acfca631266808e41aeb1af091aa92400821ed37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://977a2d3f94cdea486681aa43acfca631266808e41aeb1af091aa92400821ed37\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:54Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.843109 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f739f0f-4546-4db5-809d-435823288bd8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84fd59d33b64d1211bfe2754377534ed00ca078356eeb674fcd5d2d25761500b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec884ca4a06220486834fb7dad1a97dae6026a74
e1f4fa2c57ce504fd3f79a52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec884ca4a06220486834fb7dad1a97dae6026a74e1f4fa2c57ce504fd3f79a52\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T10:18:31Z\\\",\\\"message\\\":\\\"11 10:18:31.160898 6304 obj_retry.go:303] Retry object setup: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6\\\\nI1211 10:18:31.160908 6304 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6\\\\nF1211 10:18:31.160914 6304 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:31Z is after 2025-08-24T17:21:41Z]\\\\nI1211 10:18:31.160923 6304 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1211 10:18:31.160928 6304 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-z7882_openshift-ovn-kubernetes(4f739f0f-4546-4db5-809d-435823288bd8)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-z7882\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:54Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.854167 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11443ad0-297e-48c9-9c1e-3771584468ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c487fbed4984533659f3e1f3a79783d12b4e95706a529dc9cd24abb62adda34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-997ts
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d93647b4b0d3f4391ca6b565072c59f3f32be1433b5a043e3bb93395d529ebc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-997ts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:19Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hbks6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:54Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.866803 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b934597e-ac43-4d21-82b1-c8d4260800e6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba7f98a60c12531c6766887ffac909316494552cc1acb036c5609e6400ac5b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d9bf7e6f64790700d3765c1935fa7eb13fa839a96617117bb226a8b86090fe9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83cef2539e49e0251b7cdab06b7a9002344f7ee41150f84b3f648372b4d50b55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a89c5b395c04b94c4476db7bf1edcfe175fb552dbbf4145a7031c0f86e139e9f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0068e2981a8c781e9563c1714bf0be7f35b5510f1fd7e52ab4cc83a0c0ec0c0e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"le observer\\\\nW1211 10:18:06.233120 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1211 10:18:06.233258 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 10:18:06.234798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2924789290/tls.crt::/tmp/serving-cert-2924789290/tls.key\\\\\\\"\\\\nI1211 10:18:06.570830 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 10:18:06.573023 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 10:18:06.573049 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 10:18:06.573076 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 10:18:06.573081 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 10:18:06.580393 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1211 10:18:06.580415 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580420 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580424 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 10:18:06.580426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 10:18:06.580429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 10:18:06.580432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1211 10:18:06.580522 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1211 10:18:06.581348 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1a0dcfabc032353060f8cb94fc8c601da6b20a3e2c35b84229bf66c9256537\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:54Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.878582 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9e980e59ce0e172d37122fdae971fb0123940795827553f646ae1a6283f523\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:54Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.889383 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"98941020-2764-490d-81db-9decd21aaa13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a72fbeafe047b24c15eb96a00eacf7ff119486159cd9cf32f63af409aecc529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed2238a7f3178a2c325e0f77a94934af6c907c13d0340e7f4c67b744669b56b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mq524\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:54Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.894684 4663 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.894725 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.894738 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.894755 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.894765 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:54Z","lastTransitionTime":"2025-12-11T10:18:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.902336 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-qv7jl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60bc0fe12622361f71172b9c28087be13fe7c722952d22af2982c439d19c7a73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb85226c32cc03f4667a27f4596ce501400b323c88b347baab7574fe8f9e056c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T10:18:53Z\\\",\\\"message\\\":\\\"2025-12-11T10:18:08+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_5664ca76-8f3b-4c41-a819-a70f2bce86e2\\\\n2025-12-11T10:18:08+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_5664ca76-8f3b-4c41-a819-a70f2bce86e2 to /host/opt/cni/bin/\\\\n2025-12-11T10:18:08Z [verbose] multus-daemon started\\\\n2025-12-11T10:18:08Z [verbose] Readiness Indicator file check\\\\n2025-12-11T10:18:53Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4c2nm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-qv7jl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:54Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.910938 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5r5q6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"54c64914-8d10-48bc-a755-1f7257400a32\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"message\\\":\\\"containers with 
unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8wsjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8wsjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:20Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5r5q6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:54Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.922228 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dc8119f8882cfe2688a3b38b4746f6f9f75764080053fae1f19edd7880464e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de08980167134ace33e7d6a626c4ee66e733e247e6e8d0f7e969105c12f41580\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:54Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.933202 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:54Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.944538 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8f5qw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7cf8637-c767-47eb-8d38-578340067566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3440496709e41014f785c5aadeba1bae72c4b733779039f7508fa2ca78144537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-646rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8f5qw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:54Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.954503 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:54Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.966489 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4g9zj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27da1f1a-7a92-4c81-9889-21b44ea62db0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcf1553f9e95b87867fdf90f869863968036e47ec536a526cae63020cc9119fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"container
ID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMo
unts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef7bb22e51b47e9e49687153801d9a61be88291a5c4ae94f0d6c779c1eb779c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef7bb22e51b47e9e49687153801d9a61be88291a5c4ae94f0d6c779c1eb779c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28edb9dba08d7e8cccab85d649f75ce2c946ff8be7bb2de39e02432156e2ccf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true
,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28edb9dba08d7e8cccab85d649f75ce2c946ff8be7bb2de39e02432156e2ccf8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4g9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:54Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.984696 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8lh4c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0724e8ea-646f-422e-bc21-39189bddba17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14a19c708bdceab221701a9e228b35336aa133a021ee7a088800e9bec694e175\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4cn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.1
1\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8lh4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:54Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.995004 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"82107b1a-b827-4c4a-b133-ce7e8ed73963\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf6437a10b018ecd7f7c1a23eb664377818d86ae958710d078a245878cd7d5a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52c5a9fef9792d964d1b4505a0e5857652454e088fbc8293524f644afd044507\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed406469f2fa49f8100c4c2c4adf80a173b1e5c8ab8581a4dc8c4d93fd557c7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b8279948
8ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b28b0a79fd6ef3045b5417a53b9e665faea306044a08b64362f5f573221dea8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:54Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.996658 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.996694 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.996704 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.996720 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:54 crc kubenswrapper[4663]: I1211 10:18:54.996730 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:54Z","lastTransitionTime":"2025-12-11T10:18:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:55 crc kubenswrapper[4663]: I1211 10:18:55.004545 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8fc3491f01b999c50e166f30aef0a1c9d95bc6935a4b68cdac9bfbb70bce980\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:55Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:55 crc kubenswrapper[4663]: I1211 10:18:55.013674 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:55Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:55 crc kubenswrapper[4663]: I1211 10:18:55.098764 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:55 crc kubenswrapper[4663]: I1211 10:18:55.098799 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:55 crc kubenswrapper[4663]: I1211 10:18:55.098809 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:55 crc kubenswrapper[4663]: I1211 10:18:55.098827 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:55 crc kubenswrapper[4663]: I1211 10:18:55.098840 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:55Z","lastTransitionTime":"2025-12-11T10:18:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:55 crc kubenswrapper[4663]: I1211 10:18:55.201632 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:55 crc kubenswrapper[4663]: I1211 10:18:55.201870 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:55 crc kubenswrapper[4663]: I1211 10:18:55.201934 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:55 crc kubenswrapper[4663]: I1211 10:18:55.202030 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:55 crc kubenswrapper[4663]: I1211 10:18:55.202096 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:55Z","lastTransitionTime":"2025-12-11T10:18:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:55 crc kubenswrapper[4663]: I1211 10:18:55.304745 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:55 crc kubenswrapper[4663]: I1211 10:18:55.304786 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:55 crc kubenswrapper[4663]: I1211 10:18:55.304796 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:55 crc kubenswrapper[4663]: I1211 10:18:55.304814 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:55 crc kubenswrapper[4663]: I1211 10:18:55.304826 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:55Z","lastTransitionTime":"2025-12-11T10:18:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:55 crc kubenswrapper[4663]: I1211 10:18:55.407815 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:55 crc kubenswrapper[4663]: I1211 10:18:55.407856 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:55 crc kubenswrapper[4663]: I1211 10:18:55.407867 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:55 crc kubenswrapper[4663]: I1211 10:18:55.407883 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:55 crc kubenswrapper[4663]: I1211 10:18:55.407895 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:55Z","lastTransitionTime":"2025-12-11T10:18:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:55 crc kubenswrapper[4663]: I1211 10:18:55.503984 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:18:55 crc kubenswrapper[4663]: I1211 10:18:55.504050 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:18:55 crc kubenswrapper[4663]: E1211 10:18:55.504677 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5r5q6" podUID="54c64914-8d10-48bc-a755-1f7257400a32" Dec 11 10:18:55 crc kubenswrapper[4663]: E1211 10:18:55.504677 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 10:18:55 crc kubenswrapper[4663]: I1211 10:18:55.509746 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:55 crc kubenswrapper[4663]: I1211 10:18:55.509778 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:55 crc kubenswrapper[4663]: I1211 10:18:55.509786 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:55 crc kubenswrapper[4663]: I1211 10:18:55.509798 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:55 crc kubenswrapper[4663]: I1211 10:18:55.509810 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:55Z","lastTransitionTime":"2025-12-11T10:18:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:55 crc kubenswrapper[4663]: I1211 10:18:55.612230 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:55 crc kubenswrapper[4663]: I1211 10:18:55.612274 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:55 crc kubenswrapper[4663]: I1211 10:18:55.612286 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:55 crc kubenswrapper[4663]: I1211 10:18:55.612303 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:55 crc kubenswrapper[4663]: I1211 10:18:55.612314 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:55Z","lastTransitionTime":"2025-12-11T10:18:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:55 crc kubenswrapper[4663]: I1211 10:18:55.714815 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:55 crc kubenswrapper[4663]: I1211 10:18:55.715073 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:55 crc kubenswrapper[4663]: I1211 10:18:55.715150 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:55 crc kubenswrapper[4663]: I1211 10:18:55.715286 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:55 crc kubenswrapper[4663]: I1211 10:18:55.715353 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:55Z","lastTransitionTime":"2025-12-11T10:18:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:55 crc kubenswrapper[4663]: I1211 10:18:55.817362 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:55 crc kubenswrapper[4663]: I1211 10:18:55.817403 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:55 crc kubenswrapper[4663]: I1211 10:18:55.817414 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:55 crc kubenswrapper[4663]: I1211 10:18:55.817427 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:55 crc kubenswrapper[4663]: I1211 10:18:55.817440 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:55Z","lastTransitionTime":"2025-12-11T10:18:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:55 crc kubenswrapper[4663]: I1211 10:18:55.919831 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:55 crc kubenswrapper[4663]: I1211 10:18:55.919867 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:55 crc kubenswrapper[4663]: I1211 10:18:55.919875 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:55 crc kubenswrapper[4663]: I1211 10:18:55.919889 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:55 crc kubenswrapper[4663]: I1211 10:18:55.919898 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:55Z","lastTransitionTime":"2025-12-11T10:18:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:56 crc kubenswrapper[4663]: I1211 10:18:56.023134 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:56 crc kubenswrapper[4663]: I1211 10:18:56.023215 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:56 crc kubenswrapper[4663]: I1211 10:18:56.023227 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:56 crc kubenswrapper[4663]: I1211 10:18:56.023257 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:56 crc kubenswrapper[4663]: I1211 10:18:56.023266 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:56Z","lastTransitionTime":"2025-12-11T10:18:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:56 crc kubenswrapper[4663]: I1211 10:18:56.126159 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:56 crc kubenswrapper[4663]: I1211 10:18:56.126220 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:56 crc kubenswrapper[4663]: I1211 10:18:56.126241 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:56 crc kubenswrapper[4663]: I1211 10:18:56.126255 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:56 crc kubenswrapper[4663]: I1211 10:18:56.126263 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:56Z","lastTransitionTime":"2025-12-11T10:18:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:56 crc kubenswrapper[4663]: I1211 10:18:56.228615 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:56 crc kubenswrapper[4663]: I1211 10:18:56.228658 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:56 crc kubenswrapper[4663]: I1211 10:18:56.228667 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:56 crc kubenswrapper[4663]: I1211 10:18:56.228681 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:56 crc kubenswrapper[4663]: I1211 10:18:56.228692 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:56Z","lastTransitionTime":"2025-12-11T10:18:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:56 crc kubenswrapper[4663]: I1211 10:18:56.330067 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:56 crc kubenswrapper[4663]: I1211 10:18:56.330092 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:56 crc kubenswrapper[4663]: I1211 10:18:56.330100 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:56 crc kubenswrapper[4663]: I1211 10:18:56.330111 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:56 crc kubenswrapper[4663]: I1211 10:18:56.330119 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:56Z","lastTransitionTime":"2025-12-11T10:18:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:56 crc kubenswrapper[4663]: I1211 10:18:56.432907 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:56 crc kubenswrapper[4663]: I1211 10:18:56.433265 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:56 crc kubenswrapper[4663]: I1211 10:18:56.433350 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:56 crc kubenswrapper[4663]: I1211 10:18:56.433443 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:56 crc kubenswrapper[4663]: I1211 10:18:56.433512 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:56Z","lastTransitionTime":"2025-12-11T10:18:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:56 crc kubenswrapper[4663]: I1211 10:18:56.504509 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:18:56 crc kubenswrapper[4663]: I1211 10:18:56.504594 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:18:56 crc kubenswrapper[4663]: E1211 10:18:56.504649 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 10:18:56 crc kubenswrapper[4663]: E1211 10:18:56.504733 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 10:18:56 crc kubenswrapper[4663]: I1211 10:18:56.536227 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:56 crc kubenswrapper[4663]: I1211 10:18:56.536256 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:56 crc kubenswrapper[4663]: I1211 10:18:56.536265 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:56 crc kubenswrapper[4663]: I1211 10:18:56.536279 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:56 crc kubenswrapper[4663]: I1211 10:18:56.536289 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:56Z","lastTransitionTime":"2025-12-11T10:18:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:56 crc kubenswrapper[4663]: I1211 10:18:56.638245 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:56 crc kubenswrapper[4663]: I1211 10:18:56.638302 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:56 crc kubenswrapper[4663]: I1211 10:18:56.638311 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:56 crc kubenswrapper[4663]: I1211 10:18:56.638327 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:56 crc kubenswrapper[4663]: I1211 10:18:56.638338 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:56Z","lastTransitionTime":"2025-12-11T10:18:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:56 crc kubenswrapper[4663]: I1211 10:18:56.740619 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:56 crc kubenswrapper[4663]: I1211 10:18:56.740923 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:56 crc kubenswrapper[4663]: I1211 10:18:56.741006 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:56 crc kubenswrapper[4663]: I1211 10:18:56.741086 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:56 crc kubenswrapper[4663]: I1211 10:18:56.741160 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:56Z","lastTransitionTime":"2025-12-11T10:18:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:56 crc kubenswrapper[4663]: I1211 10:18:56.844138 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:56 crc kubenswrapper[4663]: I1211 10:18:56.844225 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:56 crc kubenswrapper[4663]: I1211 10:18:56.844238 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:56 crc kubenswrapper[4663]: I1211 10:18:56.844258 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:56 crc kubenswrapper[4663]: I1211 10:18:56.844271 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:56Z","lastTransitionTime":"2025-12-11T10:18:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:56 crc kubenswrapper[4663]: I1211 10:18:56.946803 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:56 crc kubenswrapper[4663]: I1211 10:18:56.946840 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:56 crc kubenswrapper[4663]: I1211 10:18:56.946850 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:56 crc kubenswrapper[4663]: I1211 10:18:56.946866 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:56 crc kubenswrapper[4663]: I1211 10:18:56.946878 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:56Z","lastTransitionTime":"2025-12-11T10:18:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:57 crc kubenswrapper[4663]: I1211 10:18:57.049835 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:57 crc kubenswrapper[4663]: I1211 10:18:57.049891 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:57 crc kubenswrapper[4663]: I1211 10:18:57.049901 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:57 crc kubenswrapper[4663]: I1211 10:18:57.049917 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:57 crc kubenswrapper[4663]: I1211 10:18:57.049930 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:57Z","lastTransitionTime":"2025-12-11T10:18:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:57 crc kubenswrapper[4663]: I1211 10:18:57.152411 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:57 crc kubenswrapper[4663]: I1211 10:18:57.152454 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:57 crc kubenswrapper[4663]: I1211 10:18:57.152465 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:57 crc kubenswrapper[4663]: I1211 10:18:57.152483 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:57 crc kubenswrapper[4663]: I1211 10:18:57.152494 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:57Z","lastTransitionTime":"2025-12-11T10:18:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:57 crc kubenswrapper[4663]: I1211 10:18:57.254578 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:57 crc kubenswrapper[4663]: I1211 10:18:57.254624 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:57 crc kubenswrapper[4663]: I1211 10:18:57.254636 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:57 crc kubenswrapper[4663]: I1211 10:18:57.254651 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:57 crc kubenswrapper[4663]: I1211 10:18:57.254662 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:57Z","lastTransitionTime":"2025-12-11T10:18:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:57 crc kubenswrapper[4663]: I1211 10:18:57.357083 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:57 crc kubenswrapper[4663]: I1211 10:18:57.357146 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:57 crc kubenswrapper[4663]: I1211 10:18:57.357156 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:57 crc kubenswrapper[4663]: I1211 10:18:57.357197 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:57 crc kubenswrapper[4663]: I1211 10:18:57.357209 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:57Z","lastTransitionTime":"2025-12-11T10:18:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:57 crc kubenswrapper[4663]: I1211 10:18:57.459506 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:57 crc kubenswrapper[4663]: I1211 10:18:57.459557 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:57 crc kubenswrapper[4663]: I1211 10:18:57.459566 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:57 crc kubenswrapper[4663]: I1211 10:18:57.459583 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:57 crc kubenswrapper[4663]: I1211 10:18:57.459595 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:57Z","lastTransitionTime":"2025-12-11T10:18:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:57 crc kubenswrapper[4663]: I1211 10:18:57.504396 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:18:57 crc kubenswrapper[4663]: I1211 10:18:57.504463 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:18:57 crc kubenswrapper[4663]: E1211 10:18:57.504513 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 10:18:57 crc kubenswrapper[4663]: E1211 10:18:57.504625 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5r5q6" podUID="54c64914-8d10-48bc-a755-1f7257400a32" Dec 11 10:18:57 crc kubenswrapper[4663]: I1211 10:18:57.562248 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:57 crc kubenswrapper[4663]: I1211 10:18:57.562296 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:57 crc kubenswrapper[4663]: I1211 10:18:57.562307 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:57 crc kubenswrapper[4663]: I1211 10:18:57.562322 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:57 crc kubenswrapper[4663]: I1211 10:18:57.562335 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:57Z","lastTransitionTime":"2025-12-11T10:18:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:57 crc kubenswrapper[4663]: I1211 10:18:57.664987 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:57 crc kubenswrapper[4663]: I1211 10:18:57.665017 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:57 crc kubenswrapper[4663]: I1211 10:18:57.665025 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:57 crc kubenswrapper[4663]: I1211 10:18:57.665042 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:57 crc kubenswrapper[4663]: I1211 10:18:57.665053 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:57Z","lastTransitionTime":"2025-12-11T10:18:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:57 crc kubenswrapper[4663]: I1211 10:18:57.766506 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:57 crc kubenswrapper[4663]: I1211 10:18:57.766541 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:57 crc kubenswrapper[4663]: I1211 10:18:57.766549 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:57 crc kubenswrapper[4663]: I1211 10:18:57.766562 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:57 crc kubenswrapper[4663]: I1211 10:18:57.766571 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:57Z","lastTransitionTime":"2025-12-11T10:18:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:57 crc kubenswrapper[4663]: I1211 10:18:57.868038 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:57 crc kubenswrapper[4663]: I1211 10:18:57.868069 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:57 crc kubenswrapper[4663]: I1211 10:18:57.868077 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:57 crc kubenswrapper[4663]: I1211 10:18:57.868089 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:57 crc kubenswrapper[4663]: I1211 10:18:57.868099 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:57Z","lastTransitionTime":"2025-12-11T10:18:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:57 crc kubenswrapper[4663]: I1211 10:18:57.969775 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:57 crc kubenswrapper[4663]: I1211 10:18:57.969813 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:57 crc kubenswrapper[4663]: I1211 10:18:57.969829 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:57 crc kubenswrapper[4663]: I1211 10:18:57.969844 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:57 crc kubenswrapper[4663]: I1211 10:18:57.969857 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:57Z","lastTransitionTime":"2025-12-11T10:18:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:58 crc kubenswrapper[4663]: I1211 10:18:58.072151 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:58 crc kubenswrapper[4663]: I1211 10:18:58.072256 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:58 crc kubenswrapper[4663]: I1211 10:18:58.072266 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:58 crc kubenswrapper[4663]: I1211 10:18:58.072289 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:58 crc kubenswrapper[4663]: I1211 10:18:58.072300 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:58Z","lastTransitionTime":"2025-12-11T10:18:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:58 crc kubenswrapper[4663]: I1211 10:18:58.173846 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:58 crc kubenswrapper[4663]: I1211 10:18:58.173877 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:58 crc kubenswrapper[4663]: I1211 10:18:58.173885 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:58 crc kubenswrapper[4663]: I1211 10:18:58.173898 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:58 crc kubenswrapper[4663]: I1211 10:18:58.173907 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:58Z","lastTransitionTime":"2025-12-11T10:18:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:58 crc kubenswrapper[4663]: I1211 10:18:58.276463 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:58 crc kubenswrapper[4663]: I1211 10:18:58.276510 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:58 crc kubenswrapper[4663]: I1211 10:18:58.276519 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:58 crc kubenswrapper[4663]: I1211 10:18:58.276534 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:58 crc kubenswrapper[4663]: I1211 10:18:58.276554 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:58Z","lastTransitionTime":"2025-12-11T10:18:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:58 crc kubenswrapper[4663]: I1211 10:18:58.379110 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:58 crc kubenswrapper[4663]: I1211 10:18:58.379151 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:58 crc kubenswrapper[4663]: I1211 10:18:58.379161 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:58 crc kubenswrapper[4663]: I1211 10:18:58.379173 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:58 crc kubenswrapper[4663]: I1211 10:18:58.379211 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:58Z","lastTransitionTime":"2025-12-11T10:18:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:58 crc kubenswrapper[4663]: I1211 10:18:58.481666 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:58 crc kubenswrapper[4663]: I1211 10:18:58.481705 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:58 crc kubenswrapper[4663]: I1211 10:18:58.481714 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:58 crc kubenswrapper[4663]: I1211 10:18:58.481728 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:58 crc kubenswrapper[4663]: I1211 10:18:58.481737 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:58Z","lastTransitionTime":"2025-12-11T10:18:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:58 crc kubenswrapper[4663]: I1211 10:18:58.504210 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:18:58 crc kubenswrapper[4663]: I1211 10:18:58.504229 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:18:58 crc kubenswrapper[4663]: E1211 10:18:58.504317 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 10:18:58 crc kubenswrapper[4663]: E1211 10:18:58.504469 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 10:18:58 crc kubenswrapper[4663]: I1211 10:18:58.584035 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:58 crc kubenswrapper[4663]: I1211 10:18:58.584070 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:58 crc kubenswrapper[4663]: I1211 10:18:58.584080 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:58 crc kubenswrapper[4663]: I1211 10:18:58.584094 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:58 crc kubenswrapper[4663]: I1211 10:18:58.584105 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:58Z","lastTransitionTime":"2025-12-11T10:18:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:58 crc kubenswrapper[4663]: I1211 10:18:58.686165 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:58 crc kubenswrapper[4663]: I1211 10:18:58.686247 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:58 crc kubenswrapper[4663]: I1211 10:18:58.686259 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:58 crc kubenswrapper[4663]: I1211 10:18:58.686272 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:58 crc kubenswrapper[4663]: I1211 10:18:58.686283 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:58Z","lastTransitionTime":"2025-12-11T10:18:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:58 crc kubenswrapper[4663]: I1211 10:18:58.788894 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:58 crc kubenswrapper[4663]: I1211 10:18:58.788946 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:58 crc kubenswrapper[4663]: I1211 10:18:58.788957 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:58 crc kubenswrapper[4663]: I1211 10:18:58.788973 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:58 crc kubenswrapper[4663]: I1211 10:18:58.788984 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:58Z","lastTransitionTime":"2025-12-11T10:18:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:58 crc kubenswrapper[4663]: I1211 10:18:58.891024 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:58 crc kubenswrapper[4663]: I1211 10:18:58.891063 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:58 crc kubenswrapper[4663]: I1211 10:18:58.891072 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:58 crc kubenswrapper[4663]: I1211 10:18:58.891085 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:58 crc kubenswrapper[4663]: I1211 10:18:58.891094 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:58Z","lastTransitionTime":"2025-12-11T10:18:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:58 crc kubenswrapper[4663]: I1211 10:18:58.992345 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:58 crc kubenswrapper[4663]: I1211 10:18:58.992382 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:58 crc kubenswrapper[4663]: I1211 10:18:58.992391 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:58 crc kubenswrapper[4663]: I1211 10:18:58.992405 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:58 crc kubenswrapper[4663]: I1211 10:18:58.992413 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:58Z","lastTransitionTime":"2025-12-11T10:18:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.094030 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.094079 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.094089 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.094100 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.094109 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:59Z","lastTransitionTime":"2025-12-11T10:18:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.195916 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.195947 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.195955 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.195967 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.195976 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:59Z","lastTransitionTime":"2025-12-11T10:18:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.297922 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.297953 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.297961 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.297971 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.297979 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:59Z","lastTransitionTime":"2025-12-11T10:18:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.399668 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.399694 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.399703 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.399712 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.399721 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:59Z","lastTransitionTime":"2025-12-11T10:18:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.501988 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.502022 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.502030 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.502043 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.502052 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:59Z","lastTransitionTime":"2025-12-11T10:18:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.504278 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.504384 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:18:59 crc kubenswrapper[4663]: E1211 10:18:59.504488 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5r5q6" podUID="54c64914-8d10-48bc-a755-1f7257400a32" Dec 11 10:18:59 crc kubenswrapper[4663]: E1211 10:18:59.504549 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.514938 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8746ac1c-332c-475c-bf39-1c4dbd584637\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dbc58fcfdfe62d2170360ef7d64580a71e7d25d21800257757b80a2bccff9a5e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d29ef8426056a5691774eaa72cd716c08a424df3a0b426e1d7897d839230622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/o
penshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://625fddf0941d4a6bb39e6e05fa5888872b1e22f59fde1ffcbcb9990def6a4194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://977a2d3f94cdea486681aa43acfca631266808e41aeb1af091aa92400821ed37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://977a2d3f94cdea486681aa43acfca631266808e41aeb1af091aa92400821ed37\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:59Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.528721 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f739f0f-4546-4db5-809d-435823288bd8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84fd59d33b64d1211bfe2754377534ed00ca078356eeb674fcd5d2d25761500b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec884ca4a06220486834fb7dad1a97dae6026a74e1f4fa2c57ce504fd3f79a52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec884ca4a06220486834fb7dad1a97dae6026a74e1f4fa2c57ce504fd3f79a52\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T10:18:31Z\\\",\\\"message\\\":\\\"11 10:18:31.160898 6304 obj_retry.go:303] Retry object setup: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6\\\\nI1211 10:18:31.160908 6304 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6\\\\nF1211 10:18:31.160914 6304 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:31Z is after 2025-08-24T17:21:41Z]\\\\nI1211 10:18:31.160923 6304 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1211 10:18:31.160928 6304 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-z7882_openshift-ovn-kubernetes(4f739f0f-4546-4db5-809d-435823288bd8)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-z7882\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:59Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.537435 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11443ad0-297e-48c9-9c1e-3771584468ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c487fbed4984533659f3e1f3a79783d12b4e95706a529dc9cd24abb62adda34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-997ts
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d93647b4b0d3f4391ca6b565072c59f3f32be1433b5a043e3bb93395d529ebc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-997ts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:19Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hbks6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:59Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.546759 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b934597e-ac43-4d21-82b1-c8d4260800e6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba7f98a60c12531c6766887ffac909316494552cc1acb036c5609e6400ac5b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d9bf7e6f64790700d3765c1935fa7eb13fa839a96617117bb226a8b86090fe9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83cef2539e49e0251b7cdab06b7a9002344f7ee41150f84b3f648372b4d50b55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a89c5b395c04b94c4476db7bf1edcfe175fb552dbbf4145a7031c0f86e139e9f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0068e2981a8c781e9563c1714bf0be7f35b5510f1fd7e52ab4cc83a0c0ec0c0e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"le observer\\\\nW1211 10:18:06.233120 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1211 10:18:06.233258 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 10:18:06.234798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2924789290/tls.crt::/tmp/serving-cert-2924789290/tls.key\\\\\\\"\\\\nI1211 10:18:06.570830 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 10:18:06.573023 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 10:18:06.573049 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 10:18:06.573076 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 10:18:06.573081 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 10:18:06.580393 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1211 10:18:06.580415 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580420 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580424 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 10:18:06.580426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 10:18:06.580429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 10:18:06.580432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1211 10:18:06.580522 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1211 10:18:06.581348 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1a0dcfabc032353060f8cb94fc8c601da6b20a3e2c35b84229bf66c9256537\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:59Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.554132 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9e980e59ce0e172d37122fdae971fb0123940795827553f646ae1a6283f523\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:59Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.561286 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"98941020-2764-490d-81db-9decd21aaa13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a72fbeafe047b24c15eb96a00eacf7ff119486159cd9cf32f63af409aecc529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed2238a7f3178a2c325e0f77a94934af6c907c13d0340e7f4c67b744669b56b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mq524\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:59Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.570359 4663 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-qv7jl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60bc0fe12622361f71172b9c28087be13fe7c722952d22af2982c439d19c7a73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb85226c32cc03f4667a27f4596ce501400b323c88b347baab7574fe8f9e056c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T10:18:53Z\\\",\\\"message\\\":\\\"2025-12-11T10:18:08+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_5664ca76-8f3b-4c41-a819-a70f2bce86e2\\\\n2025-12-11T10:18:08+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_5664ca76-8f3b-4c41-a819-a70f2bce86e2 to /host/opt/cni/bin/\\\\n2025-12-11T10:18:08Z [verbose] multus-daemon started\\\\n2025-12-11T10:18:08Z [verbose] Readiness Indicator file check\\\\n2025-12-11T10:18:53Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4c2nm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-qv7jl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:59Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.582521 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5r5q6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"54c64914-8d10-48bc-a755-1f7257400a32\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"message\\\":\\\"containers with 
unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8wsjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8wsjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:20Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5r5q6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:59Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.594398 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dc8119f8882cfe2688a3b38b4746f6f9f75764080053fae1f19edd7880464e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de08980167134ace33e7d6a626c4ee66e733e247e6e8d0f7e969105c12f41580\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:59Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.603779 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.603813 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.603824 4663 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.603839 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.603850 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:59Z","lastTransitionTime":"2025-12-11T10:18:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.604615 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:59Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.614022 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8f5qw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7cf8637-c767-47eb-8d38-578340067566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3440496709e41014f785c5aadeba1bae72c4b733779039f7508fa2ca78144537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-646rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8f5qw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:59Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.623585 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:59Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.632821 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4g9zj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27da1f1a-7a92-4c81-9889-21b44ea62db0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcf1553f9e95b87867fdf90f869863968036e47ec536a526cae63020cc9119fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef7bb22e51b47e9e49687153801d9a61be88291a5c4ae94f0d6c779c1eb779c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef7bb22e51b47e9e49687153801d9a61be88291a5c4ae94f0d6c779c1eb779c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28edb9dba08d7e8cccab85d649f75ce2c946ff8be7bb2de39e02432156e2ccf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28edb9dba08d7e8cccab85d649f75ce2c946ff8be7bb2de39e02432156e2ccf8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4g9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:59Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.640062 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8lh4c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0724e8ea-646f-422e-bc21-39189bddba17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14a19c708bdceab221701a9e228b35336aa133a021ee7a088800e9bec694e175\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4cn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8lh4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:59Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.648521 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"82107b1a-b827-4c4a-b133-ce7e8ed73963\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf6437a10b018ecd7f7c1a23eb664377818d86ae958710d078a245878cd7d5a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52c5a9fef9792d964d1b4505a0e5857652454e088fbc8293524f644afd044507\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed406469f2fa49f8100c4c2c4adf80a173b1e5c8ab8581a4dc8c4d93fd557c7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b28b0a79fd6ef3045b5417a53b9e665faea306044a08b64362f5f573221dea8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:59Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.657214 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8fc3491f01b999c50e166f30aef0a1c9d95bc6935a4b68cdac9bfbb70bce980\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:59Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.665108 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:59Z is after 2025-08-24T17:21:41Z" Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.705998 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.706034 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.706043 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.706058 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.706067 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:59Z","lastTransitionTime":"2025-12-11T10:18:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.807362 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.807397 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.807407 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.807423 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.807433 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:59Z","lastTransitionTime":"2025-12-11T10:18:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.908858 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.908896 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.908906 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.908921 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:18:59 crc kubenswrapper[4663]: I1211 10:18:59.908931 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:18:59Z","lastTransitionTime":"2025-12-11T10:18:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.012174 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.012237 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.012247 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.012262 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.012272 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:00Z","lastTransitionTime":"2025-12-11T10:19:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.114795 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.114835 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.114845 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.114859 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.114870 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:00Z","lastTransitionTime":"2025-12-11T10:19:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.217325 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.217356 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.217365 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.217378 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.217388 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:00Z","lastTransitionTime":"2025-12-11T10:19:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.319743 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.319797 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.319806 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.319821 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.319831 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:00Z","lastTransitionTime":"2025-12-11T10:19:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.422140 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.422173 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.422207 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.422220 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.422230 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:00Z","lastTransitionTime":"2025-12-11T10:19:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.504101 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.504142 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:19:00 crc kubenswrapper[4663]: E1211 10:19:00.504259 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 10:19:00 crc kubenswrapper[4663]: E1211 10:19:00.504438 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.524398 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.524438 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.524448 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.524463 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.524473 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:00Z","lastTransitionTime":"2025-12-11T10:19:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.626674 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.626713 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.626722 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.626739 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.626750 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:00Z","lastTransitionTime":"2025-12-11T10:19:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.728358 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.728545 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.728609 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.728678 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.728745 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:00Z","lastTransitionTime":"2025-12-11T10:19:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.830861 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.830890 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.830899 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.830911 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.830919 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:00Z","lastTransitionTime":"2025-12-11T10:19:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.924533 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.924591 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.924603 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.924621 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.924640 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:00Z","lastTransitionTime":"2025-12-11T10:19:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:00 crc kubenswrapper[4663]: E1211 10:19:00.935746 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:00Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:00Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a09a2675-5943-46f1-b4c2-67ff05e820a6\\\",\\\"systemUUID\\\":\\\"18e040fd-bd85-4c98-8f53-60800a512169\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:00Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.939740 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.939784 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.939796 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.939810 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.939821 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:00Z","lastTransitionTime":"2025-12-11T10:19:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:00 crc kubenswrapper[4663]: E1211 10:19:00.950563 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:00Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:00Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a09a2675-5943-46f1-b4c2-67ff05e820a6\\\",\\\"systemUUID\\\":\\\"18e040fd-bd85-4c98-8f53-60800a512169\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:00Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.953975 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.954028 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.954039 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.954051 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.954059 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:00Z","lastTransitionTime":"2025-12-11T10:19:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:00 crc kubenswrapper[4663]: E1211 10:19:00.963823 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:00Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:00Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a09a2675-5943-46f1-b4c2-67ff05e820a6\\\",\\\"systemUUID\\\":\\\"18e040fd-bd85-4c98-8f53-60800a512169\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:00Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.966903 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.966947 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.966958 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.966972 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.966981 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:00Z","lastTransitionTime":"2025-12-11T10:19:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:00 crc kubenswrapper[4663]: E1211 10:19:00.976804 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:00Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:00Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a09a2675-5943-46f1-b4c2-67ff05e820a6\\\",\\\"systemUUID\\\":\\\"18e040fd-bd85-4c98-8f53-60800a512169\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:00Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.979818 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.979889 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.979899 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.979910 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.979919 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:00Z","lastTransitionTime":"2025-12-11T10:19:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:00 crc kubenswrapper[4663]: E1211 10:19:00.988670 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:00Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:00Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a09a2675-5943-46f1-b4c2-67ff05e820a6\\\",\\\"systemUUID\\\":\\\"18e040fd-bd85-4c98-8f53-60800a512169\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:00Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:00 crc kubenswrapper[4663]: E1211 10:19:00.988777 4663 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.990331 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.990387 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.990397 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.990414 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:00 crc kubenswrapper[4663]: I1211 10:19:00.990425 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:00Z","lastTransitionTime":"2025-12-11T10:19:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.092604 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.092650 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.092660 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.092677 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.092687 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:01Z","lastTransitionTime":"2025-12-11T10:19:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.194618 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.194652 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.194660 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.194676 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.194686 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:01Z","lastTransitionTime":"2025-12-11T10:19:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.296753 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.296795 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.296803 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.296817 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.296827 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:01Z","lastTransitionTime":"2025-12-11T10:19:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.399045 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.399082 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.399091 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.399106 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.399118 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:01Z","lastTransitionTime":"2025-12-11T10:19:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.500651 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.501118 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.501242 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.501316 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.501393 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:01Z","lastTransitionTime":"2025-12-11T10:19:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.504069 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.504126 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:19:01 crc kubenswrapper[4663]: E1211 10:19:01.504211 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5r5q6" podUID="54c64914-8d10-48bc-a755-1f7257400a32" Dec 11 10:19:01 crc kubenswrapper[4663]: E1211 10:19:01.504301 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.504865 4663 scope.go:117] "RemoveContainer" containerID="ec884ca4a06220486834fb7dad1a97dae6026a74e1f4fa2c57ce504fd3f79a52" Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.605262 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.605352 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.605621 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.607175 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.607228 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:01Z","lastTransitionTime":"2025-12-11T10:19:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.710524 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.710559 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.710568 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.710580 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.710590 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:01Z","lastTransitionTime":"2025-12-11T10:19:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.813730 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.813788 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.813797 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.813813 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.813844 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:01Z","lastTransitionTime":"2025-12-11T10:19:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.838407 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-z7882_4f739f0f-4546-4db5-809d-435823288bd8/ovnkube-controller/2.log" Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.841618 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" event={"ID":"4f739f0f-4546-4db5-809d-435823288bd8","Type":"ContainerStarted","Data":"0963c6c1df39ea1ae72e428e60b76ab3c5677d293f2fc2dd2f19bf3807a0dd6e"} Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.842009 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.861069 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b934597e-ac43-4d21-82b1-c8d4260800e6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba7f98a60c12531c6766887ffac909316494552cc1acb036c5609e6400ac5b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d9bf7e6f64790700d3765c1935fa7eb13fa839a96617117bb226a8b86090fe9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83cef2539
e49e0251b7cdab06b7a9002344f7ee41150f84b3f648372b4d50b55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a89c5b395c04b94c4476db7bf1edcfe175fb552dbbf4145a7031c0f86e139e9f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0068e2981a8c781e9563c1714bf0be7f35b5510f1fd7e52ab4cc83a0c0ec0c0e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"le observer\\\\nW1211 10:18:06.233120 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1211 10:18:06.233258 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 10:18:06.234798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2924789290/tls.crt::/tmp/serving-cert-2924789290/tls.key\\\\\\\"\\\\nI1211 10:18:06.570830 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 10:18:06.573023 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 10:18:06.573049 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 10:18:06.573076 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 10:18:06.573081 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 10:18:06.580393 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1211 10:18:06.580415 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580420 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580424 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 10:18:06.580426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 10:18:06.580429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 10:18:06.580432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1211 10:18:06.580522 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1211 10:18:06.581348 1 cmd.go:182] pods 
\\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1a0dcfabc032353060f8cb94fc8c601da6b20a3e2c35b84229bf66c9256537\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:01Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.872057 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9e980e59ce0e172d37122fdae971fb0123940795827553f646ae1a6283f523\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:01Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.888975 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f739f0f-4546-4db5-809d-435823288bd8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84fd59d33b64d1211bfe2754377534ed00ca078356eeb674fcd5d2d25761500b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0963c6c1df39ea1ae72e428e60b76ab3c5677d29
3f2fc2dd2f19bf3807a0dd6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec884ca4a06220486834fb7dad1a97dae6026a74e1f4fa2c57ce504fd3f79a52\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T10:18:31Z\\\",\\\"message\\\":\\\"11 10:18:31.160898 6304 obj_retry.go:303] Retry object setup: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6\\\\nI1211 10:18:31.160908 6304 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6\\\\nF1211 10:18:31.160914 6304 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:31Z is after 2025-08-24T17:21:41Z]\\\\nI1211 10:18:31.160923 6304 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1211 10:18:31.160928 6304 obj_retry.go:365] Adding new object: *v1.Pod 
openshift-kube-apiserver/kube-apiserver-crc\\\\nI\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.1
1\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-z7882\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:01Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.898072 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"11443ad0-297e-48c9-9c1e-3771584468ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c487fbed4984533659f3e1f3a79783d12b4e95706a529dc9cd24abb62adda34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-997ts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d93647b4b0d3f4391ca6b565072c59f3f32be1433b5a043e3bb93395d529ebc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-997ts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:19Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hbks6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:01Z is after 2025-08-24T17:21:41Z" Dec 11 
10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.911071 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dc8119f8882cfe2688a3b38b4746f6f9f75764080053fae1f19edd7880464e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de08980167134ace33e7d6a626c4ee66e733e247e6e8d0f7e969105c12f41580\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:01Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.916738 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.916790 4663 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.916800 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.916815 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.916840 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:01Z","lastTransitionTime":"2025-12-11T10:19:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.923528 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:01Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.932116 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8f5qw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7cf8637-c767-47eb-8d38-578340067566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3440496709e41014f785c5aadeba1bae72c4b733779039f7508fa2ca78144537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-646rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8f5qw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:01Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.940241 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98941020-2764-490d-81db-9decd21aaa13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a72fbeafe047b24c15eb96a00eacf7ff119486159cd9cf32f63af409aecc529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed2238a7f3178a2c325e0f77a94934af6c907c13d0340e7f4c67b744669b56b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mq524\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:01Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.949549 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-qv7jl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60bc0fe12622361f71172b9c28087be13fe7c722952d22af2982c439d19c7a73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb85226c32cc03f4667a27f4596ce501400b323c88b347baab7574fe8f9e056c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T10:18:53Z\\\",\\\"message\\\":\\\"2025-12-11T10:18:08+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_5664ca76-8f3b-4c41-a819-a70f2bce86e2\\\\n2025-12-11T10:18:08+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_5664ca76-8f3b-4c41-a819-a70f2bce86e2 to /host/opt/cni/bin/\\\\n2025-12-11T10:18:08Z [verbose] multus-daemon started\\\\n2025-12-11T10:18:08Z [verbose] Readiness Indicator file check\\\\n2025-12-11T10:18:53Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4c2nm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-qv7jl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:01Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.958223 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5r5q6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"54c64914-8d10-48bc-a755-1f7257400a32\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"message\\\":\\\"containers with 
unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8wsjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8wsjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:20Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5r5q6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:01Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.967889 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"82107b1a-b827-4c4a-b133-ce7e8ed73963\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf6437a10b018ecd7f7c1a23eb664377818d86ae958710d078a245878cd7d5a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52c5a9fef9792d964d1b4505a0e5857652454e088fbc8293524f644afd044507\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed406469f2fa49f8100c4c2c4adf80a173b1e5c8ab8581a4dc8c4d93fd557c7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b28b0a79fd6ef3045b5417a53b9e665faea306044a08b64362f5f573221dea8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:01Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.977026 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8fc3491f01b999c50e166f30aef0a1c9d95bc6935a4b68cdac9bfbb70bce980\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:01Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.987965 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:01Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:01 crc kubenswrapper[4663]: I1211 10:19:01.997245 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:01Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.008061 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4g9zj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27da1f1a-7a92-4c81-9889-21b44ea62db0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcf1553f9e95b87867fdf90f869863968036e47ec536a526cae63020cc9119fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"container
ID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMo
unts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef7bb22e51b47e9e49687153801d9a61be88291a5c4ae94f0d6c779c1eb779c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef7bb22e51b47e9e49687153801d9a61be88291a5c4ae94f0d6c779c1eb779c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28edb9dba08d7e8cccab85d649f75ce2c946ff8be7bb2de39e02432156e2ccf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true
,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28edb9dba08d7e8cccab85d649f75ce2c946ff8be7bb2de39e02432156e2ccf8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4g9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:02Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.015791 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8lh4c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0724e8ea-646f-422e-bc21-39189bddba17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14a19c708bdceab221701a9e228b35336aa133a021ee7a088800e9bec694e175\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4cn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.1
1\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8lh4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:02Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.018590 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.018613 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.018622 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.018636 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.018646 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:02Z","lastTransitionTime":"2025-12-11T10:19:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.024718 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8746ac1c-332c-475c-bf39-1c4dbd584637\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dbc58fcfdfe62d2170360ef7d64580a71e7d25d21800257757b80a2bccff9a5e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d29ef8426056a5691774eaa72cd716c08a424df3a0b426e1d7897d839230622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://625fddf0941d4a6bb39e6e05fa5888872b1e22f59fde1ffcbcb9990def6a4194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://977a2d3f94cdea486681aa43acfca631266808e41aeb1af091aa92400821ed37\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://977a2d3f94cdea486681aa43acfca631266808e41aeb1af091aa92400821ed37\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:02Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.120366 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.120409 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.120419 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.120435 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.120455 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:02Z","lastTransitionTime":"2025-12-11T10:19:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.222363 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.222397 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.222405 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.222417 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.222425 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:02Z","lastTransitionTime":"2025-12-11T10:19:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.324212 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.324247 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.324257 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.324268 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.324278 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:02Z","lastTransitionTime":"2025-12-11T10:19:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.426035 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.426069 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.426078 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.426089 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.426099 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:02Z","lastTransitionTime":"2025-12-11T10:19:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.503696 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.503772 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:19:02 crc kubenswrapper[4663]: E1211 10:19:02.503870 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 10:19:02 crc kubenswrapper[4663]: E1211 10:19:02.504004 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.528034 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.528062 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.528071 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.528082 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.528090 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:02Z","lastTransitionTime":"2025-12-11T10:19:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.630226 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.630254 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.630262 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.630275 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.630283 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:02Z","lastTransitionTime":"2025-12-11T10:19:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.732433 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.732690 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.732698 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.732709 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.732718 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:02Z","lastTransitionTime":"2025-12-11T10:19:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.834475 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.834534 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.834543 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.834559 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.834568 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:02Z","lastTransitionTime":"2025-12-11T10:19:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.846104 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-z7882_4f739f0f-4546-4db5-809d-435823288bd8/ovnkube-controller/3.log" Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.846606 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-z7882_4f739f0f-4546-4db5-809d-435823288bd8/ovnkube-controller/2.log" Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.848772 4663 generic.go:334] "Generic (PLEG): container finished" podID="4f739f0f-4546-4db5-809d-435823288bd8" containerID="0963c6c1df39ea1ae72e428e60b76ab3c5677d293f2fc2dd2f19bf3807a0dd6e" exitCode=1 Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.848802 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" event={"ID":"4f739f0f-4546-4db5-809d-435823288bd8","Type":"ContainerDied","Data":"0963c6c1df39ea1ae72e428e60b76ab3c5677d293f2fc2dd2f19bf3807a0dd6e"} Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.848833 4663 scope.go:117] "RemoveContainer" containerID="ec884ca4a06220486834fb7dad1a97dae6026a74e1f4fa2c57ce504fd3f79a52" Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.849292 4663 scope.go:117] "RemoveContainer" containerID="0963c6c1df39ea1ae72e428e60b76ab3c5677d293f2fc2dd2f19bf3807a0dd6e" Dec 11 10:19:02 crc kubenswrapper[4663]: E1211 10:19:02.849433 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-z7882_openshift-ovn-kubernetes(4f739f0f-4546-4db5-809d-435823288bd8)\"" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" podUID="4f739f0f-4546-4db5-809d-435823288bd8" Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.861324 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8fc3491f01b999c50e166f30aef0a1c9d95bc6935a4b68cdac9bfbb70bce980\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:02Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.871512 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:02Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.888081 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:02Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.898950 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4g9zj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27da1f1a-7a92-4c81-9889-21b44ea62db0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcf1553f9e95b87867fdf90f869863968036e47ec536a526cae63020cc9119fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef7bb22e51b47e9e49687153801d9a61be88291a5c4ae94f0d6c779c1eb779c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef7bb22e51b47e9e49687153801d9a61be88291a5c4ae94f0d6c779c1eb779c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28edb9dba08d7e8cccab85d649f75ce2c946ff8be7bb2de39e02432156e2ccf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28edb9dba08d7e8cccab85d649f75ce2c946ff8be7bb2de39e02432156e2ccf8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4g9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:02Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.906083 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8lh4c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0724e8ea-646f-422e-bc21-39189bddba17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14a19c708bdceab221701a9e228b35336aa133a021ee7a088800e9bec694e175\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4cn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8lh4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:02Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 
10:19:02.915118 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"82107b1a-b827-4c4a-b133-ce7e8ed73963\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf6437a10b018ecd7f7c1a23eb664377818d86ae958710d078a245878cd7d5a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52c5a9fef9792d964d1b4505a0e5857652454e088fbc8293524f644afd044507\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed406469f2fa49f8100c4c2c4adf80a173b1e5c8ab8581a4dc8c4d93fd557c7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b28b0a79fd6ef3045b5417a53b9e665faea306044a08b64362f5f573221dea8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:02Z is after 2025-08-24T17:21:41Z"
Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.925361 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8746ac1c-332c-475c-bf39-1c4dbd584637\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dbc58fcfdfe62d2170360ef7d64580a71e7d25d21800257757b80a2bccff9a5e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d29ef8426056a5691774eaa72cd716c08a424df3a0b426e1d7897d839230622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://625fddf0941d4a6bb39e6e05fa5888872b1e22f59fde1ffcbcb9990def6a4194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://977a2d3f94cdea486681aa43acfca631266808e41aeb1af091aa92400821ed37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://977a2d3f94cdea486681aa43acfca631266808e41aeb1af091aa92400821ed37\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:02Z is after 2025-08-24T17:21:41Z"
Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.937523 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.937552 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.937560 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.937574 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.937584 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:02Z","lastTransitionTime":"2025-12-11T10:19:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.937791 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b934597e-ac43-4d21-82b1-c8d4260800e6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba7f98a60c12531c6766887ffac909316494552cc1acb036c5609e6400ac5b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d9bf7e6f64790700d3765c1935fa7eb13fa839a96617117bb226a8b86090fe9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83cef2539e49e0251b7cdab06b7a9002344f7ee41150f84b3f648372b4d50b55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a89c5b395c04b94c4476db7bf1edcfe175fb552dbbf4145a7031c0f86e139e9f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0068e2981a8c781e9563c1714bf0be7f35b5510f1fd7e52ab4cc83a0c0ec0c0e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"le observer\\\\nW1211 10:18:06.233120 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1211 10:18:06.233258 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 10:18:06.234798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2924789290/tls.crt::/tmp/serving-cert-2924789290/tls.key\\\\\\\"\\\\nI1211 10:18:06.570830 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 10:18:06.573023 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 10:18:06.573049 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 10:18:06.573076 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 10:18:06.573081 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 10:18:06.580393 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1211 10:18:06.580415 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580420 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580424 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 10:18:06.580426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 10:18:06.580429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 10:18:06.580432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1211 10:18:06.580522 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1211 10:18:06.581348 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1a0dcfabc032353060f8cb94fc8c601da6b20a3e2c35b84229bf66c9256537\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:02Z is after 2025-08-24T17:21:41Z"
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9e980e59ce0e172d37122fdae971fb0123940795827553f646ae1a6283f523\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:02Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.962024 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f739f0f-4546-4db5-809d-435823288bd8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: 
Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.962024 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f739f0f-4546-4db5-809d-435823288bd8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84fd59d33b64d1211bfe2754377534ed00ca078356eeb674fcd5d2d25761500b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0963c6c1df39ea1ae72e428e60b76ab3c5677d293f2fc2dd2f19bf3807a0dd6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec884ca4a06220486834fb7dad1a97dae6026a74e1f4fa2c57ce504fd3f79a52\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T10:18:31Z\\\",\\\"message\\\":\\\"11 10:18:31.160898 6304 obj_retry.go:303] Retry object setup: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6\\\\nI1211 10:18:31.160908 6304 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6\\\\nF1211 10:18:31.160914 6304 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:18:31Z is after 2025-08-24T17:21:41Z]\\\\nI1211 10:18:31.160923 6304 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1211 10:18:31.160928 6304 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0963c6c1df39ea1ae72e428e60b76ab3c5677d293f2fc2dd2f19bf3807a0dd6e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T10:19:02Z\\\",\\\"message\\\":\\\"ent-go/informers/factory.go:160\\\\nI1211 10:19:02.168532 6725 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1211 10:19:02.168730 6725 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1211 10:19:02.168789 6725 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1211 10:19:02.168889 6725 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1211 10:19:02.168794 6725 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1211 10:19:02.169519 6725 factory.go:656] Stopping watch factory\\\\nI1211 10:19:02.189222 6725 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI1211 10:19:02.189233 6725 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI1211 10:19:02.189265 6725 ovnkube.go:599] Stopped ovnkube\\\\nI1211 10:19:02.189287 6725 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1211 10:19:02.189346 6725 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-z7882\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:02Z is after 2025-08-24T17:21:41Z"
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"11443ad0-297e-48c9-9c1e-3771584468ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c487fbed4984533659f3e1f3a79783d12b4e95706a529dc9cd24abb62adda34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-997ts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d93647b4b0d3f4391ca6b565072c59f3f32be1433b5a043e3bb93395d529ebc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-997ts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:19Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hbks6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:02Z is after 2025-08-24T17:21:41Z" Dec 11 
Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.980469 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:02Z is after 2025-08-24T17:21:41Z"
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7cf8637-c767-47eb-8d38-578340067566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3440496709e41014f785c5aadeba1bae72c4b733779039f7508fa2ca78144537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-646rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8f5qw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:02Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:02 crc kubenswrapper[4663]: I1211 10:19:02.997601 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"98941020-2764-490d-81db-9decd21aaa13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a72fbeafe047b24c15eb96a00eacf7ff119486159cd9cf32f63af409aecc529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed2238a7f3178a2c325e0f77a94934af6c907c13d0340e7f4c67b744669b56b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mq524\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:02Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.005849 4663 status_manager.go:875] "Failed to 
Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.005849 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-qv7jl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60bc0fe12622361f71172b9c28087be13fe7c722952d22af2982c439d19c7a73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb85226c32cc03f4667a27f4596ce501400b323c88b347baab7574fe8f9e056c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T10:18:53Z\\\",\\\"message\\\":\\\"2025-12-11T10:18:08+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_5664ca76-8f3b-4c41-a819-a70f2bce86e2\\\\n2025-12-11T10:18:08+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_5664ca76-8f3b-4c41-a819-a70f2bce86e2 to /host/opt/cni/bin/\\\\n2025-12-11T10:18:08Z [verbose] multus-daemon started\\\\n2025-12-11T10:18:08Z [verbose] Readiness Indicator file check\\\\n2025-12-11T10:18:53Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4c2nm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-qv7jl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:03Z is after 2025-08-24T17:21:41Z"
Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.013499 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5r5q6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"54c64914-8d10-48bc-a755-1f7257400a32\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8wsjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8wsjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:20Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5r5q6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:03Z is after 2025-08-24T17:21:41Z"
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dc8119f8882cfe2688a3b38b4746f6f9f75764080053fae1f19edd7880464e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de08980167134ace33e7d6a626c4ee66e733e247e6e8d0f7e969105c12f41580\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:03Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.039818 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.039847 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.039855 4663 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.039869 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.039879 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:03Z","lastTransitionTime":"2025-12-11T10:19:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.142256 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.142280 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.142287 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.142300 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.142311 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:03Z","lastTransitionTime":"2025-12-11T10:19:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.244517 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.244554 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.244563 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.244576 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.244585 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:03Z","lastTransitionTime":"2025-12-11T10:19:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.347240 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.347274 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.347282 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.347295 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.347307 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:03Z","lastTransitionTime":"2025-12-11T10:19:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.449487 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.449518 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.449526 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.449547 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.449557 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:03Z","lastTransitionTime":"2025-12-11T10:19:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.505919 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:19:03 crc kubenswrapper[4663]: E1211 10:19:03.506012 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5r5q6" podUID="54c64914-8d10-48bc-a755-1f7257400a32" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.506515 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:19:03 crc kubenswrapper[4663]: E1211 10:19:03.506576 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.513980 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.551006 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.551042 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.551080 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.551091 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.551100 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:03Z","lastTransitionTime":"2025-12-11T10:19:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.653047 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.653082 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.653091 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.653104 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.653113 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:03Z","lastTransitionTime":"2025-12-11T10:19:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.754635 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.754663 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.754670 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.754685 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.754696 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:03Z","lastTransitionTime":"2025-12-11T10:19:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.852727 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-z7882_4f739f0f-4546-4db5-809d-435823288bd8/ovnkube-controller/3.log" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.855740 4663 scope.go:117] "RemoveContainer" containerID="0963c6c1df39ea1ae72e428e60b76ab3c5677d293f2fc2dd2f19bf3807a0dd6e" Dec 11 10:19:03 crc kubenswrapper[4663]: E1211 10:19:03.855908 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-z7882_openshift-ovn-kubernetes(4f739f0f-4546-4db5-809d-435823288bd8)\"" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" podUID="4f739f0f-4546-4db5-809d-435823288bd8" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.855967 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.855989 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.855997 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.856007 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.856014 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:03Z","lastTransitionTime":"2025-12-11T10:19:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.866765 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-qv7jl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60bc0fe12622361f71172b9c28087be13fe7c722952d22af2982c439d19c7a73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb85226c32cc03f4667a27f4596ce501400b323c88b347baab7574fe8f9e056c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T10:18:53Z\\\",\\\"message\\\":\\\"2025-12-11T10:18:08+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_5664ca76-8f3b-4c41-a819-a70f2bce86e2\\\\n2025-12-11T10:18:08+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_5664ca76-8f3b-4c41-a819-a70f2bce86e2 to /host/opt/cni/bin/\\\\n2025-12-11T10:18:08Z [verbose] multus-daemon started\\\\n2025-12-11T10:18:08Z [verbose] Readiness Indicator file check\\\\n2025-12-11T10:18:53Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4c2nm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-qv7jl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:03Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.874432 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5r5q6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"54c64914-8d10-48bc-a755-1f7257400a32\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"message\\\":\\\"containers with 
unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8wsjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8wsjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:20Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5r5q6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:03Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.884063 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dc8119f8882cfe2688a3b38b4746f6f9f75764080053fae1f19edd7880464e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de08980167134ace33e7d6a626c4ee66e733e247e6e8d0f7e969105c12f41580\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:03Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.892886 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:03Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.901191 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8f5qw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7cf8637-c767-47eb-8d38-578340067566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3440496709e41014f785c5aadeba1bae72c4b733779039f7508fa2ca78144537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-646rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8f5qw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:03Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.909945 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"98941020-2764-490d-81db-9decd21aaa13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a72fbeafe047b24c15eb96a00eacf7ff119486159cd9cf32f63af409aecc529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed2238a7f3178a2c325e0f77a94934af6c907c13d0340e7f4c67b744669b56b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mq524\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:03Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.919908 4663 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4g9zj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27da1f1a-7a92-4c81-9889-21b44ea62db0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcf1553f9e95b87867fdf90f869863968036e47ec536a526cae63020cc9119fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef7bb22e51b47e9e49687153801d9a61be88291a5c4ae94f0d6c779c1eb779c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef7bb22e51b47e9e49687153801d9a61be88291a5c4ae94f0d6c779c1eb779c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28edb9dba08d7e8cccab85d649f75ce2c946ff8be7bb2de39e02432156e2ccf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28edb9dba08d7e8cccab85d649f75ce2c946ff8be7bb2de39e02432156e2ccf8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4g9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:03Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.929048 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8lh4c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0724e8ea-646f-422e-bc21-39189bddba17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14a19c708bdceab221701a9e228b35336aa133a021ee7a088800e9bec694e175\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4cn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8lh4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:03Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.937116 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"82107b1a-b827-4c4a-b133-ce7e8ed73963\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf6437a10b018ecd7f7c1a23eb664377818d86ae958710d078a245878cd7d5a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52c5a9fef9792d964d1b4505a0e5857652454e088fbc8293524f644afd044507\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed406469f2fa49f8100c4c2c4adf80a173b1e5c8ab8581a4dc8c4d93fd557c7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b28b0a79fd6ef3045b5417a53b9e665faea306044a08b64362f5f573221dea8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:03Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.947534 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8fc3491f01b999c50e166f30aef0a1c9d95bc6935a4b68cdac9bfbb70bce980\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:03Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.955999 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:03Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.957595 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.957629 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.957639 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.957654 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.957664 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:03Z","lastTransitionTime":"2025-12-11T10:19:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.965159 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:03Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.972669 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8746ac1c-332c-475c-bf39-1c4dbd584637\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dbc58fcfdfe62d2170360ef7d64580a71e7d25d21800257757b80a2bccff9a5e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d29ef8426056a5691774eaa72cd716c08a424df3a0b426e1d7897d839230622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://625fddf0941d4a6bb39e6e05fa5888872b1e22f59fde1ffcbcb9990def6a4194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://977a2d3f94cdea486681aa43acfca631266808e41aeb1af091aa92400821ed37\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://977a2d3f94cdea486681aa43acfca631266808e41aeb1af091aa92400821ed37\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:03Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.979367 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ed97342-45c6-471f-97f2-7dc6ca0d0e63\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06faf32fe966c534260438465b3da92def0fd10908c333aa992567764401d34a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d781e969fff35a9312c0d9cb5e96a751741064ad0bae9e814ceaf39220f7500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-de
v@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d781e969fff35a9312c0d9cb5e96a751741064ad0bae9e814ceaf39220f7500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:03Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.986700 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11443ad0-297e-48c9-9c1e-3771584468ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c487fbed4984533659f3e1f3a79783d12b4e95706a529dc9cd24abb62adda34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-997ts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d93647b4b0d3f4391ca6b565072c59f3f32be1433b5a043e3bb93395d529ebc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-997ts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:19Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hbks6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:03Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:03 crc kubenswrapper[4663]: I1211 10:19:03.995510 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b934597e-ac43-4d21-82b1-c8d4260800e6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba7f98a60c12531c6766887ffac909316494552cc1acb036c5609e6400ac5b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d9bf7e6f64790700d3765c1935fa7eb13fa839a96617117bb226a8b86090fe9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1
ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83cef2539e49e0251b7cdab06b7a9002344f7ee41150f84b3f648372b4d50b55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a89c5b395c04b94c4476db7bf1edcfe175fb552dbbf4145a7031c0f86e139e9f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0068e2981a8c781e9563c1714bf0be7f35b5510f1fd7e52ab4cc83a0c0ec0c0e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"le observer\\\\nW1211 10:18:06.233120 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1211 10:18:06.233258 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 10:18:06.234798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2924789290/tls.crt::/tmp/serving-cert-2924789290/tls.key\\\\\\\"\\\\nI1211 10:18:06.570830 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 10:18:06.573023 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 10:18:06.573049 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 10:18:06.573076 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 10:18:06.573081 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 10:18:06.580393 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1211 10:18:06.580415 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580420 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580424 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' 
detected.\\\\nW1211 10:18:06.580426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 10:18:06.580429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 10:18:06.580432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1211 10:18:06.580522 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1211 10:18:06.581348 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1a0dcfabc032353060f8cb94fc8c601da6b20a3e2c35b84229bf66c9256537\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:03Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:04 crc kubenswrapper[4663]: I1211 10:19:04.003263 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9e980e59ce0e172d37122fdae971fb0123940795827553f646ae1a6283f523\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:04Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:04 crc kubenswrapper[4663]: I1211 10:19:04.015406 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f739f0f-4546-4db5-809d-435823288bd8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84fd59d33b64d1211bfe2754377534ed00ca078356eeb674fcd5d2d25761500b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0963c6c1df39ea1ae72e428e60b76ab3c5677d29
3f2fc2dd2f19bf3807a0dd6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0963c6c1df39ea1ae72e428e60b76ab3c5677d293f2fc2dd2f19bf3807a0dd6e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T10:19:02Z\\\",\\\"message\\\":\\\"ent-go/informers/factory.go:160\\\\nI1211 10:19:02.168532 6725 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1211 10:19:02.168730 6725 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1211 10:19:02.168789 6725 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1211 10:19:02.168889 6725 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1211 10:19:02.168794 6725 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1211 10:19:02.169519 6725 factory.go:656] Stopping watch factory\\\\nI1211 10:19:02.189222 6725 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI1211 10:19:02.189233 6725 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI1211 10:19:02.189265 6725 ovnkube.go:599] Stopped ovnkube\\\\nI1211 10:19:02.189287 6725 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1211 10:19:02.189346 6725 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:19:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-z7882_openshift-ovn-kubernetes(4f739f0f-4546-4db5-809d-435823288bd8)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-z7882\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:04Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:04 crc kubenswrapper[4663]: I1211 10:19:04.060061 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:04 crc kubenswrapper[4663]: I1211 10:19:04.060086 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:04 crc kubenswrapper[4663]: I1211 10:19:04.060094 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:04 crc kubenswrapper[4663]: I1211 10:19:04.060106 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:04 crc kubenswrapper[4663]: I1211 10:19:04.060116 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:04Z","lastTransitionTime":"2025-12-11T10:19:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:04 crc kubenswrapper[4663]: I1211 10:19:04.162353 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:04 crc kubenswrapper[4663]: I1211 10:19:04.162386 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:04 crc kubenswrapper[4663]: I1211 10:19:04.162396 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:04 crc kubenswrapper[4663]: I1211 10:19:04.162408 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:04 crc kubenswrapper[4663]: I1211 10:19:04.162415 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:04Z","lastTransitionTime":"2025-12-11T10:19:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:04 crc kubenswrapper[4663]: I1211 10:19:04.263729 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:04 crc kubenswrapper[4663]: I1211 10:19:04.263756 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:04 crc kubenswrapper[4663]: I1211 10:19:04.263764 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:04 crc kubenswrapper[4663]: I1211 10:19:04.263774 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:04 crc kubenswrapper[4663]: I1211 10:19:04.263782 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:04Z","lastTransitionTime":"2025-12-11T10:19:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:04 crc kubenswrapper[4663]: I1211 10:19:04.365831 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:04 crc kubenswrapper[4663]: I1211 10:19:04.365937 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:04 crc kubenswrapper[4663]: I1211 10:19:04.366022 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:04 crc kubenswrapper[4663]: I1211 10:19:04.366086 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:04 crc kubenswrapper[4663]: I1211 10:19:04.366144 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:04Z","lastTransitionTime":"2025-12-11T10:19:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:04 crc kubenswrapper[4663]: I1211 10:19:04.467850 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:04 crc kubenswrapper[4663]: I1211 10:19:04.467983 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:04 crc kubenswrapper[4663]: I1211 10:19:04.468041 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:04 crc kubenswrapper[4663]: I1211 10:19:04.468105 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:04 crc kubenswrapper[4663]: I1211 10:19:04.468164 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:04Z","lastTransitionTime":"2025-12-11T10:19:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:04 crc kubenswrapper[4663]: I1211 10:19:04.504160 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:19:04 crc kubenswrapper[4663]: I1211 10:19:04.504218 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:19:04 crc kubenswrapper[4663]: E1211 10:19:04.504264 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 10:19:04 crc kubenswrapper[4663]: E1211 10:19:04.504339 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 10:19:04 crc kubenswrapper[4663]: I1211 10:19:04.569957 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:04 crc kubenswrapper[4663]: I1211 10:19:04.569986 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:04 crc kubenswrapper[4663]: I1211 10:19:04.569995 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:04 crc kubenswrapper[4663]: I1211 10:19:04.570004 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:04 crc kubenswrapper[4663]: I1211 10:19:04.570011 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:04Z","lastTransitionTime":"2025-12-11T10:19:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:04 crc kubenswrapper[4663]: I1211 10:19:04.671395 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:04 crc kubenswrapper[4663]: I1211 10:19:04.671428 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:04 crc kubenswrapper[4663]: I1211 10:19:04.671437 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:04 crc kubenswrapper[4663]: I1211 10:19:04.671448 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:04 crc kubenswrapper[4663]: I1211 10:19:04.671457 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:04Z","lastTransitionTime":"2025-12-11T10:19:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:04 crc kubenswrapper[4663]: I1211 10:19:04.773231 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:04 crc kubenswrapper[4663]: I1211 10:19:04.773270 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:04 crc kubenswrapper[4663]: I1211 10:19:04.773280 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:04 crc kubenswrapper[4663]: I1211 10:19:04.773294 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:04 crc kubenswrapper[4663]: I1211 10:19:04.773305 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:04Z","lastTransitionTime":"2025-12-11T10:19:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:04 crc kubenswrapper[4663]: I1211 10:19:04.875285 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:04 crc kubenswrapper[4663]: I1211 10:19:04.875315 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:04 crc kubenswrapper[4663]: I1211 10:19:04.875324 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:04 crc kubenswrapper[4663]: I1211 10:19:04.875336 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:04 crc kubenswrapper[4663]: I1211 10:19:04.875344 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:04Z","lastTransitionTime":"2025-12-11T10:19:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:04 crc kubenswrapper[4663]: I1211 10:19:04.977404 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:04 crc kubenswrapper[4663]: I1211 10:19:04.977457 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:04 crc kubenswrapper[4663]: I1211 10:19:04.977468 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:04 crc kubenswrapper[4663]: I1211 10:19:04.977479 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:04 crc kubenswrapper[4663]: I1211 10:19:04.977489 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:04Z","lastTransitionTime":"2025-12-11T10:19:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:05 crc kubenswrapper[4663]: I1211 10:19:05.079291 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:05 crc kubenswrapper[4663]: I1211 10:19:05.079324 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:05 crc kubenswrapper[4663]: I1211 10:19:05.079333 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:05 crc kubenswrapper[4663]: I1211 10:19:05.079346 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:05 crc kubenswrapper[4663]: I1211 10:19:05.079355 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:05Z","lastTransitionTime":"2025-12-11T10:19:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:05 crc kubenswrapper[4663]: I1211 10:19:05.180843 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:05 crc kubenswrapper[4663]: I1211 10:19:05.180883 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:05 crc kubenswrapper[4663]: I1211 10:19:05.180892 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:05 crc kubenswrapper[4663]: I1211 10:19:05.180905 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:05 crc kubenswrapper[4663]: I1211 10:19:05.180916 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:05Z","lastTransitionTime":"2025-12-11T10:19:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:05 crc kubenswrapper[4663]: I1211 10:19:05.283418 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:05 crc kubenswrapper[4663]: I1211 10:19:05.283457 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:05 crc kubenswrapper[4663]: I1211 10:19:05.283467 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:05 crc kubenswrapper[4663]: I1211 10:19:05.283482 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:05 crc kubenswrapper[4663]: I1211 10:19:05.283493 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:05Z","lastTransitionTime":"2025-12-11T10:19:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:05 crc kubenswrapper[4663]: I1211 10:19:05.385735 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:05 crc kubenswrapper[4663]: I1211 10:19:05.385766 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:05 crc kubenswrapper[4663]: I1211 10:19:05.385775 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:05 crc kubenswrapper[4663]: I1211 10:19:05.385805 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:05 crc kubenswrapper[4663]: I1211 10:19:05.385812 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:05Z","lastTransitionTime":"2025-12-11T10:19:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:05 crc kubenswrapper[4663]: I1211 10:19:05.488142 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:05 crc kubenswrapper[4663]: I1211 10:19:05.488199 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:05 crc kubenswrapper[4663]: I1211 10:19:05.488208 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:05 crc kubenswrapper[4663]: I1211 10:19:05.488223 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:05 crc kubenswrapper[4663]: I1211 10:19:05.488233 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:05Z","lastTransitionTime":"2025-12-11T10:19:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:05 crc kubenswrapper[4663]: I1211 10:19:05.504417 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:19:05 crc kubenswrapper[4663]: I1211 10:19:05.504484 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:19:05 crc kubenswrapper[4663]: E1211 10:19:05.504522 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 10:19:05 crc kubenswrapper[4663]: E1211 10:19:05.504587 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5r5q6" podUID="54c64914-8d10-48bc-a755-1f7257400a32" Dec 11 10:19:05 crc kubenswrapper[4663]: I1211 10:19:05.589859 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:05 crc kubenswrapper[4663]: I1211 10:19:05.589893 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:05 crc kubenswrapper[4663]: I1211 10:19:05.589901 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:05 crc kubenswrapper[4663]: I1211 10:19:05.589912 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:05 crc kubenswrapper[4663]: I1211 10:19:05.589920 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:05Z","lastTransitionTime":"2025-12-11T10:19:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:05 crc kubenswrapper[4663]: I1211 10:19:05.692382 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:05 crc kubenswrapper[4663]: I1211 10:19:05.692425 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:05 crc kubenswrapper[4663]: I1211 10:19:05.692436 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:05 crc kubenswrapper[4663]: I1211 10:19:05.692467 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:05 crc kubenswrapper[4663]: I1211 10:19:05.692481 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:05Z","lastTransitionTime":"2025-12-11T10:19:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:05 crc kubenswrapper[4663]: I1211 10:19:05.794579 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:05 crc kubenswrapper[4663]: I1211 10:19:05.794625 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:05 crc kubenswrapper[4663]: I1211 10:19:05.794635 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:05 crc kubenswrapper[4663]: I1211 10:19:05.794648 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:05 crc kubenswrapper[4663]: I1211 10:19:05.794659 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:05Z","lastTransitionTime":"2025-12-11T10:19:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:05 crc kubenswrapper[4663]: I1211 10:19:05.896709 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:05 crc kubenswrapper[4663]: I1211 10:19:05.896760 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:05 crc kubenswrapper[4663]: I1211 10:19:05.896768 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:05 crc kubenswrapper[4663]: I1211 10:19:05.896780 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:05 crc kubenswrapper[4663]: I1211 10:19:05.896788 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:05Z","lastTransitionTime":"2025-12-11T10:19:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:05 crc kubenswrapper[4663]: I1211 10:19:05.998404 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:05 crc kubenswrapper[4663]: I1211 10:19:05.998435 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:05 crc kubenswrapper[4663]: I1211 10:19:05.998445 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:05 crc kubenswrapper[4663]: I1211 10:19:05.998458 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:05 crc kubenswrapper[4663]: I1211 10:19:05.998466 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:05Z","lastTransitionTime":"2025-12-11T10:19:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:06 crc kubenswrapper[4663]: I1211 10:19:06.101071 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:06 crc kubenswrapper[4663]: I1211 10:19:06.101105 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:06 crc kubenswrapper[4663]: I1211 10:19:06.101114 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:06 crc kubenswrapper[4663]: I1211 10:19:06.101130 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:06 crc kubenswrapper[4663]: I1211 10:19:06.101139 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:06Z","lastTransitionTime":"2025-12-11T10:19:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:06 crc kubenswrapper[4663]: I1211 10:19:06.203555 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:06 crc kubenswrapper[4663]: I1211 10:19:06.203580 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:06 crc kubenswrapper[4663]: I1211 10:19:06.203587 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:06 crc kubenswrapper[4663]: I1211 10:19:06.203596 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:06 crc kubenswrapper[4663]: I1211 10:19:06.203604 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:06Z","lastTransitionTime":"2025-12-11T10:19:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:06 crc kubenswrapper[4663]: I1211 10:19:06.305731 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:06 crc kubenswrapper[4663]: I1211 10:19:06.305781 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:06 crc kubenswrapper[4663]: I1211 10:19:06.305791 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:06 crc kubenswrapper[4663]: I1211 10:19:06.305803 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:06 crc kubenswrapper[4663]: I1211 10:19:06.305813 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:06Z","lastTransitionTime":"2025-12-11T10:19:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:06 crc kubenswrapper[4663]: I1211 10:19:06.408023 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:06 crc kubenswrapper[4663]: I1211 10:19:06.408081 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:06 crc kubenswrapper[4663]: I1211 10:19:06.408090 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:06 crc kubenswrapper[4663]: I1211 10:19:06.408103 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:06 crc kubenswrapper[4663]: I1211 10:19:06.408112 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:06Z","lastTransitionTime":"2025-12-11T10:19:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:06 crc kubenswrapper[4663]: I1211 10:19:06.503923 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:19:06 crc kubenswrapper[4663]: I1211 10:19:06.503940 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:19:06 crc kubenswrapper[4663]: E1211 10:19:06.504046 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 10:19:06 crc kubenswrapper[4663]: E1211 10:19:06.504134 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 10:19:06 crc kubenswrapper[4663]: I1211 10:19:06.510319 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:06 crc kubenswrapper[4663]: I1211 10:19:06.510340 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:06 crc kubenswrapper[4663]: I1211 10:19:06.510348 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:06 crc kubenswrapper[4663]: I1211 10:19:06.510359 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:06 crc kubenswrapper[4663]: I1211 10:19:06.510369 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:06Z","lastTransitionTime":"2025-12-11T10:19:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:06 crc kubenswrapper[4663]: I1211 10:19:06.612957 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:06 crc kubenswrapper[4663]: I1211 10:19:06.612986 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:06 crc kubenswrapper[4663]: I1211 10:19:06.612998 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:06 crc kubenswrapper[4663]: I1211 10:19:06.613010 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:06 crc kubenswrapper[4663]: I1211 10:19:06.613021 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:06Z","lastTransitionTime":"2025-12-11T10:19:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:06 crc kubenswrapper[4663]: I1211 10:19:06.714626 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:06 crc kubenswrapper[4663]: I1211 10:19:06.714658 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:06 crc kubenswrapper[4663]: I1211 10:19:06.714666 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:06 crc kubenswrapper[4663]: I1211 10:19:06.714678 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:06 crc kubenswrapper[4663]: I1211 10:19:06.714687 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:06Z","lastTransitionTime":"2025-12-11T10:19:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:06 crc kubenswrapper[4663]: I1211 10:19:06.817411 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:06 crc kubenswrapper[4663]: I1211 10:19:06.817449 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:06 crc kubenswrapper[4663]: I1211 10:19:06.817458 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:06 crc kubenswrapper[4663]: I1211 10:19:06.817471 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:06 crc kubenswrapper[4663]: I1211 10:19:06.817481 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:06Z","lastTransitionTime":"2025-12-11T10:19:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:06 crc kubenswrapper[4663]: I1211 10:19:06.919326 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:06 crc kubenswrapper[4663]: I1211 10:19:06.919369 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:06 crc kubenswrapper[4663]: I1211 10:19:06.919378 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:06 crc kubenswrapper[4663]: I1211 10:19:06.919395 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:06 crc kubenswrapper[4663]: I1211 10:19:06.919404 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:06Z","lastTransitionTime":"2025-12-11T10:19:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:07 crc kubenswrapper[4663]: I1211 10:19:07.021240 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:07 crc kubenswrapper[4663]: I1211 10:19:07.021280 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:07 crc kubenswrapper[4663]: I1211 10:19:07.021290 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:07 crc kubenswrapper[4663]: I1211 10:19:07.021305 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:07 crc kubenswrapper[4663]: I1211 10:19:07.021315 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:07Z","lastTransitionTime":"2025-12-11T10:19:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:07 crc kubenswrapper[4663]: I1211 10:19:07.123331 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:07 crc kubenswrapper[4663]: I1211 10:19:07.123363 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:07 crc kubenswrapper[4663]: I1211 10:19:07.123370 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:07 crc kubenswrapper[4663]: I1211 10:19:07.123383 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:07 crc kubenswrapper[4663]: I1211 10:19:07.123392 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:07Z","lastTransitionTime":"2025-12-11T10:19:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:07 crc kubenswrapper[4663]: I1211 10:19:07.225492 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:07 crc kubenswrapper[4663]: I1211 10:19:07.225525 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:07 crc kubenswrapper[4663]: I1211 10:19:07.225535 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:07 crc kubenswrapper[4663]: I1211 10:19:07.225548 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:07 crc kubenswrapper[4663]: I1211 10:19:07.225559 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:07Z","lastTransitionTime":"2025-12-11T10:19:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:07 crc kubenswrapper[4663]: I1211 10:19:07.327373 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:07 crc kubenswrapper[4663]: I1211 10:19:07.327399 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:07 crc kubenswrapper[4663]: I1211 10:19:07.327408 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:07 crc kubenswrapper[4663]: I1211 10:19:07.327421 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:07 crc kubenswrapper[4663]: I1211 10:19:07.327431 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:07Z","lastTransitionTime":"2025-12-11T10:19:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:07 crc kubenswrapper[4663]: I1211 10:19:07.428929 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:07 crc kubenswrapper[4663]: I1211 10:19:07.428982 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:07 crc kubenswrapper[4663]: I1211 10:19:07.428999 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:07 crc kubenswrapper[4663]: I1211 10:19:07.429020 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:07 crc kubenswrapper[4663]: I1211 10:19:07.429034 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:07Z","lastTransitionTime":"2025-12-11T10:19:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:07 crc kubenswrapper[4663]: I1211 10:19:07.503942 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:19:07 crc kubenswrapper[4663]: I1211 10:19:07.504018 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:19:07 crc kubenswrapper[4663]: E1211 10:19:07.504061 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 10:19:07 crc kubenswrapper[4663]: E1211 10:19:07.504116 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5r5q6" podUID="54c64914-8d10-48bc-a755-1f7257400a32" Dec 11 10:19:07 crc kubenswrapper[4663]: I1211 10:19:07.530865 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:07 crc kubenswrapper[4663]: I1211 10:19:07.530891 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:07 crc kubenswrapper[4663]: I1211 10:19:07.530900 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:07 crc kubenswrapper[4663]: I1211 10:19:07.530910 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:07 crc kubenswrapper[4663]: I1211 10:19:07.530917 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:07Z","lastTransitionTime":"2025-12-11T10:19:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:07 crc kubenswrapper[4663]: I1211 10:19:07.633087 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:07 crc kubenswrapper[4663]: I1211 10:19:07.633113 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:07 crc kubenswrapper[4663]: I1211 10:19:07.633121 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:07 crc kubenswrapper[4663]: I1211 10:19:07.633131 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:07 crc kubenswrapper[4663]: I1211 10:19:07.633138 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:07Z","lastTransitionTime":"2025-12-11T10:19:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:07 crc kubenswrapper[4663]: I1211 10:19:07.735166 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:07 crc kubenswrapper[4663]: I1211 10:19:07.735210 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:07 crc kubenswrapper[4663]: I1211 10:19:07.735219 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:07 crc kubenswrapper[4663]: I1211 10:19:07.735228 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:07 crc kubenswrapper[4663]: I1211 10:19:07.735235 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:07Z","lastTransitionTime":"2025-12-11T10:19:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:07 crc kubenswrapper[4663]: I1211 10:19:07.836959 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:07 crc kubenswrapper[4663]: I1211 10:19:07.836988 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:07 crc kubenswrapper[4663]: I1211 10:19:07.836997 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:07 crc kubenswrapper[4663]: I1211 10:19:07.837009 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:07 crc kubenswrapper[4663]: I1211 10:19:07.837018 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:07Z","lastTransitionTime":"2025-12-11T10:19:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:07 crc kubenswrapper[4663]: I1211 10:19:07.939218 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:07 crc kubenswrapper[4663]: I1211 10:19:07.939258 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:07 crc kubenswrapper[4663]: I1211 10:19:07.939267 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:07 crc kubenswrapper[4663]: I1211 10:19:07.939281 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:07 crc kubenswrapper[4663]: I1211 10:19:07.939290 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:07Z","lastTransitionTime":"2025-12-11T10:19:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:08 crc kubenswrapper[4663]: I1211 10:19:08.041356 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:08 crc kubenswrapper[4663]: I1211 10:19:08.041397 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:08 crc kubenswrapper[4663]: I1211 10:19:08.041405 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:08 crc kubenswrapper[4663]: I1211 10:19:08.041420 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:08 crc kubenswrapper[4663]: I1211 10:19:08.041431 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:08Z","lastTransitionTime":"2025-12-11T10:19:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:08 crc kubenswrapper[4663]: I1211 10:19:08.143420 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:08 crc kubenswrapper[4663]: I1211 10:19:08.143456 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:08 crc kubenswrapper[4663]: I1211 10:19:08.143465 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:08 crc kubenswrapper[4663]: I1211 10:19:08.143477 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:08 crc kubenswrapper[4663]: I1211 10:19:08.143487 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:08Z","lastTransitionTime":"2025-12-11T10:19:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:08 crc kubenswrapper[4663]: I1211 10:19:08.245122 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:08 crc kubenswrapper[4663]: I1211 10:19:08.245170 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:08 crc kubenswrapper[4663]: I1211 10:19:08.245203 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:08 crc kubenswrapper[4663]: I1211 10:19:08.245223 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:08 crc kubenswrapper[4663]: I1211 10:19:08.245237 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:08Z","lastTransitionTime":"2025-12-11T10:19:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:08 crc kubenswrapper[4663]: I1211 10:19:08.346989 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:08 crc kubenswrapper[4663]: I1211 10:19:08.347289 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:08 crc kubenswrapper[4663]: I1211 10:19:08.347357 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:08 crc kubenswrapper[4663]: I1211 10:19:08.347424 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:08 crc kubenswrapper[4663]: I1211 10:19:08.347480 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:08Z","lastTransitionTime":"2025-12-11T10:19:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:08 crc kubenswrapper[4663]: I1211 10:19:08.449860 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:08 crc kubenswrapper[4663]: I1211 10:19:08.450228 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:08 crc kubenswrapper[4663]: I1211 10:19:08.450327 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:08 crc kubenswrapper[4663]: I1211 10:19:08.450406 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:08 crc kubenswrapper[4663]: I1211 10:19:08.450463 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:08Z","lastTransitionTime":"2025-12-11T10:19:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:08 crc kubenswrapper[4663]: I1211 10:19:08.504614 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:19:08 crc kubenswrapper[4663]: I1211 10:19:08.504614 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:19:08 crc kubenswrapper[4663]: E1211 10:19:08.504749 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 10:19:08 crc kubenswrapper[4663]: E1211 10:19:08.504810 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 10:19:08 crc kubenswrapper[4663]: I1211 10:19:08.552524 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:08 crc kubenswrapper[4663]: I1211 10:19:08.552559 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:08 crc kubenswrapper[4663]: I1211 10:19:08.552569 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:08 crc kubenswrapper[4663]: I1211 10:19:08.552584 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:08 crc kubenswrapper[4663]: I1211 10:19:08.552594 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:08Z","lastTransitionTime":"2025-12-11T10:19:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:08 crc kubenswrapper[4663]: I1211 10:19:08.654726 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:08 crc kubenswrapper[4663]: I1211 10:19:08.655516 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:08 crc kubenswrapper[4663]: I1211 10:19:08.655632 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:08 crc kubenswrapper[4663]: I1211 10:19:08.655712 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:08 crc kubenswrapper[4663]: I1211 10:19:08.655796 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:08Z","lastTransitionTime":"2025-12-11T10:19:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:08 crc kubenswrapper[4663]: I1211 10:19:08.758642 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:08 crc kubenswrapper[4663]: I1211 10:19:08.758679 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:08 crc kubenswrapper[4663]: I1211 10:19:08.758688 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:08 crc kubenswrapper[4663]: I1211 10:19:08.758700 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:08 crc kubenswrapper[4663]: I1211 10:19:08.758710 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:08Z","lastTransitionTime":"2025-12-11T10:19:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:08 crc kubenswrapper[4663]: I1211 10:19:08.860625 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:08 crc kubenswrapper[4663]: I1211 10:19:08.860656 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:08 crc kubenswrapper[4663]: I1211 10:19:08.860666 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:08 crc kubenswrapper[4663]: I1211 10:19:08.860677 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:08 crc kubenswrapper[4663]: I1211 10:19:08.860685 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:08Z","lastTransitionTime":"2025-12-11T10:19:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:08 crc kubenswrapper[4663]: I1211 10:19:08.962608 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:08 crc kubenswrapper[4663]: I1211 10:19:08.962648 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:08 crc kubenswrapper[4663]: I1211 10:19:08.962657 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:08 crc kubenswrapper[4663]: I1211 10:19:08.962670 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:08 crc kubenswrapper[4663]: I1211 10:19:08.962679 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:08Z","lastTransitionTime":"2025-12-11T10:19:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.065437 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.065477 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.065487 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.065500 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.065509 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:09Z","lastTransitionTime":"2025-12-11T10:19:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.167680 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.167711 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.167720 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.167734 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.167743 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:09Z","lastTransitionTime":"2025-12-11T10:19:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.269522 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.269560 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.269569 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.269581 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.269590 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:09Z","lastTransitionTime":"2025-12-11T10:19:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.371769 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.371808 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.371817 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.371828 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.371839 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:09Z","lastTransitionTime":"2025-12-11T10:19:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.473527 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.473559 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.473569 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.473581 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.473590 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:09Z","lastTransitionTime":"2025-12-11T10:19:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.504294 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.504307 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:19:09 crc kubenswrapper[4663]: E1211 10:19:09.504431 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-5r5q6" podUID="54c64914-8d10-48bc-a755-1f7257400a32" Dec 11 10:19:09 crc kubenswrapper[4663]: E1211 10:19:09.504541 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.516372 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dc8119f8882cfe2688a3b38b4746f6f9f75764080053fae1f19edd7880464e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de08980167134ace33e7d6a626c4ee66e733e247e6e8d0f7e969105c12f41580\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:09Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.527130 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:09Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.535676 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8f5qw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7cf8637-c767-47eb-8d38-578340067566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3440496709e41014f785c5aadeba1bae72c4b733779039f7508fa2ca78144537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-646rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8f5qw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:09Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.543875 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98941020-2764-490d-81db-9decd21aaa13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a72fbeafe047b24c15eb96a00eacf7ff119486159cd9cf32f63af409aecc529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed2238a7f3178a2c325e0f77a94934af6c907c13d0340e7f4c67b744669b56b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mq524\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:09Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.552871 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-qv7jl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60bc0fe12622361f71172b9c28087be13fe7c722952d22af2982c439d19c7a73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb85226c32cc03f4667a27f4596ce501400b323c88b347baab7574fe8f9e056c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T10:18:53Z\\\",\\\"message\\\":\\\"2025-12-11T10:18:08+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_5664ca76-8f3b-4c41-a819-a70f2bce86e2\\\\n2025-12-11T10:18:08+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_5664ca76-8f3b-4c41-a819-a70f2bce86e2 to /host/opt/cni/bin/\\\\n2025-12-11T10:18:08Z [verbose] multus-daemon started\\\\n2025-12-11T10:18:08Z [verbose] Readiness Indicator file check\\\\n2025-12-11T10:18:53Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4c2nm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-qv7jl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:09Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.559873 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5r5q6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"54c64914-8d10-48bc-a755-1f7257400a32\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"message\\\":\\\"containers with 
unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8wsjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8wsjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:20Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5r5q6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:09Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.568536 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"82107b1a-b827-4c4a-b133-ce7e8ed73963\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf6437a10b018ecd7f7c1a23eb664377818d86ae958710d078a245878cd7d5a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52c5a9fef9792d964d1b4505a0e5857652454e088fbc8293524f644afd044507\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed406469f2fa49f8100c4c2c4adf80a173b1e5c8ab8581a4dc8c4d93fd557c7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b28b0a79fd6ef3045b5417a53b9e665faea306044a08b64362f5f573221dea8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:09Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.574753 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.574779 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.574788 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.574800 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.574808 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:09Z","lastTransitionTime":"2025-12-11T10:19:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.577645 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8fc3491f01b999c50e166f30aef0a1c9d95bc6935a4b68cdac9bfbb70bce980\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:09Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.586710 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:09Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.595692 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:09Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.606153 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4g9zj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27da1f1a-7a92-4c81-9889-21b44ea62db0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcf1553f9e95b87867fdf90f869863968036e47ec536a526cae63020cc9119fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef7bb22e51b47e9e49687153801d9a61be88291a5c4ae94f0d6c779c1eb779c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef7bb22e51b47e9e49687153801d9a61be88291a5c4ae94f0d6c779c1eb779c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28edb9dba08d7e8cccab85d649f75ce2c946ff8be7bb2de39e02432156e2ccf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28edb9dba08d7e8cccab85d649f75ce2c946ff8be7bb2de39e02432156e2ccf8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4g9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:09Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.612832 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8lh4c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0724e8ea-646f-422e-bc21-39189bddba17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14a19c708bdceab221701a9e228b35336aa133a021ee7a088800e9bec694e175\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4cn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8lh4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:09Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 
10:19:09.620156 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8746ac1c-332c-475c-bf39-1c4dbd584637\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dbc58fcfdfe62d2170360ef7d64580a71e7d25d21800257757b80a2bccff9a5e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d29ef8426056a5691774eaa72cd716c08a424df3a0b426e1d7897d839230622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://625fddf0941d4a6bb39e6e05fa5888872b1e22f59fde1ffcbcb9990def6a4194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://977a2d3f94cdea486681aa43acfca631266808e41aeb1af091aa92400821ed37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://977a2d3f94cdea486681aa43acfca631266808e41aeb1af091aa92400821ed37\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:09Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.629267 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ed97342-45c6-471f-97f2-7dc6ca0d0e63\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06faf32fe966c534260438465b3da92def0fd10908c333aa992567764401d34a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d781e969fff35a9312c0d9cb5e96a751741064ad0bae9e814ceaf39220f7500\\\",\\\"image\\\":\\\"quay.io/openshift-rele
ase-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d781e969fff35a9312c0d9cb5e96a751741064ad0bae9e814ceaf39220f7500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:09Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.639607 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b934597e-ac43-4d21-82b1-c8d4260800e6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba7f98a60c12531c6766887ffac909316494552cc1acb036c5609e6400ac5b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d9bf7e6f64790700d3765c1935fa7eb13fa839a96617117bb226a8b86090fe9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83cef2539e49e0251b7cdab06b7a9002344f7ee41150f84b3f648372b4d50b55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a89c5b395c04b94c4476db7bf1edcfe175fb552dbbf4145a7031c0f86e139e9f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0068e2981a8c781e9563c1714bf0be7f35b5510f1fd7e52ab4cc83a0c0ec0c0e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"le observer\\\\nW1211 10:18:06.233120 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1211 10:18:06.233258 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 10:18:06.234798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2924789290/tls.crt::/tmp/serving-cert-2924789290/tls.key\\\\\\\"\\\\nI1211 10:18:06.570830 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 10:18:06.573023 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 10:18:06.573049 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 10:18:06.573076 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 10:18:06.573081 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 10:18:06.580393 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1211 10:18:06.580415 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580420 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580424 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 10:18:06.580426 1 
secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 10:18:06.580429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 10:18:06.580432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1211 10:18:06.580522 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1211 10:18:06.581348 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1a0dcfabc032353060f8cb94fc8c601da6b20a3e2c35b84229bf66c9256537\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:09Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.647677 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9e980e59ce0e172d37122fdae971fb0123940795827553f646ae1a6283f523\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:09Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.660790 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f739f0f-4546-4db5-809d-435823288bd8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84fd59d33b64d1211bfe2754377534ed00ca078356eeb674fcd5d2d25761500b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0963c6c1df39ea1ae72e428e60b76ab3c5677d29
3f2fc2dd2f19bf3807a0dd6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0963c6c1df39ea1ae72e428e60b76ab3c5677d293f2fc2dd2f19bf3807a0dd6e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T10:19:02Z\\\",\\\"message\\\":\\\"ent-go/informers/factory.go:160\\\\nI1211 10:19:02.168532 6725 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1211 10:19:02.168730 6725 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1211 10:19:02.168789 6725 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1211 10:19:02.168889 6725 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1211 10:19:02.168794 6725 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1211 10:19:02.169519 6725 factory.go:656] Stopping watch factory\\\\nI1211 10:19:02.189222 6725 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI1211 10:19:02.189233 6725 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI1211 10:19:02.189265 6725 ovnkube.go:599] Stopped ovnkube\\\\nI1211 10:19:02.189287 6725 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1211 10:19:02.189346 6725 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:19:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-z7882_openshift-ovn-kubernetes(4f739f0f-4546-4db5-809d-435823288bd8)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-z7882\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:09Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.668479 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11443ad0-297e-48c9-9c1e-3771584468ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c487fbed4984533659f3e1f3a79783d12b4e95706a529dc9cd24abb62adda34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-997ts
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d93647b4b0d3f4391ca6b565072c59f3f32be1433b5a043e3bb93395d529ebc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-997ts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:19Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hbks6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:09Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.677368 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.677399 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.677408 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.677422 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.677431 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:09Z","lastTransitionTime":"2025-12-11T10:19:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.779128 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.779282 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.779383 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.779672 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.779734 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:09Z","lastTransitionTime":"2025-12-11T10:19:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.881142 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.881164 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.881173 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.881203 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.881215 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:09Z","lastTransitionTime":"2025-12-11T10:19:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.983841 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.983878 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.983887 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.983900 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:09 crc kubenswrapper[4663]: I1211 10:19:09.983910 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:09Z","lastTransitionTime":"2025-12-11T10:19:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:10 crc kubenswrapper[4663]: I1211 10:19:10.085049 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:10 crc kubenswrapper[4663]: I1211 10:19:10.085097 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:10 crc kubenswrapper[4663]: I1211 10:19:10.085106 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:10 crc kubenswrapper[4663]: I1211 10:19:10.085117 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:10 crc kubenswrapper[4663]: I1211 10:19:10.085127 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:10Z","lastTransitionTime":"2025-12-11T10:19:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:10 crc kubenswrapper[4663]: I1211 10:19:10.187488 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:10 crc kubenswrapper[4663]: I1211 10:19:10.187549 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:10 crc kubenswrapper[4663]: I1211 10:19:10.187559 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:10 crc kubenswrapper[4663]: I1211 10:19:10.187574 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:10 crc kubenswrapper[4663]: I1211 10:19:10.187583 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:10Z","lastTransitionTime":"2025-12-11T10:19:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:10 crc kubenswrapper[4663]: I1211 10:19:10.290104 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:10 crc kubenswrapper[4663]: I1211 10:19:10.290313 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:10 crc kubenswrapper[4663]: I1211 10:19:10.290322 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:10 crc kubenswrapper[4663]: I1211 10:19:10.290335 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:10 crc kubenswrapper[4663]: I1211 10:19:10.290344 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:10Z","lastTransitionTime":"2025-12-11T10:19:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:10 crc kubenswrapper[4663]: I1211 10:19:10.392243 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:10 crc kubenswrapper[4663]: I1211 10:19:10.392277 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:10 crc kubenswrapper[4663]: I1211 10:19:10.392288 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:10 crc kubenswrapper[4663]: I1211 10:19:10.392330 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:10 crc kubenswrapper[4663]: I1211 10:19:10.392340 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:10Z","lastTransitionTime":"2025-12-11T10:19:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:10 crc kubenswrapper[4663]: I1211 10:19:10.479158 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 10:19:10 crc kubenswrapper[4663]: I1211 10:19:10.479278 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:19:10 crc kubenswrapper[4663]: I1211 10:19:10.479300 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:19:10 crc kubenswrapper[4663]: I1211 10:19:10.479329 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:19:10 crc kubenswrapper[4663]: E1211 10:19:10.479395 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 10:20:14.47936836 +0000 UTC m=+145.297298144 (durationBeforeRetry 1m4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 10:19:10 crc kubenswrapper[4663]: E1211 10:19:10.479414 4663 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 10:19:10 crc kubenswrapper[4663]: E1211 10:19:10.479458 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 10:20:14.479447214 +0000 UTC m=+145.297376998 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 10:19:10 crc kubenswrapper[4663]: E1211 10:19:10.479502 4663 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 10:19:10 crc kubenswrapper[4663]: E1211 10:19:10.479539 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 10:20:14.479529635 +0000 UTC m=+145.297459429 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 10:19:10 crc kubenswrapper[4663]: E1211 10:19:10.479607 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 10:19:10 crc kubenswrapper[4663]: E1211 10:19:10.479616 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 10:19:10 crc kubenswrapper[4663]: E1211 10:19:10.479626 4663 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 10:19:10 crc kubenswrapper[4663]: E1211 10:19:10.479646 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. 
No retries permitted until 2025-12-11 10:20:14.479640391 +0000 UTC m=+145.297570176 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 10:19:10 crc kubenswrapper[4663]: I1211 10:19:10.494507 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:10 crc kubenswrapper[4663]: I1211 10:19:10.494554 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:10 crc kubenswrapper[4663]: I1211 10:19:10.494564 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:10 crc kubenswrapper[4663]: I1211 10:19:10.494577 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:10 crc kubenswrapper[4663]: I1211 10:19:10.494585 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:10Z","lastTransitionTime":"2025-12-11T10:19:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:10 crc kubenswrapper[4663]: I1211 10:19:10.503748 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:19:10 crc kubenswrapper[4663]: I1211 10:19:10.503835 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:19:10 crc kubenswrapper[4663]: E1211 10:19:10.503932 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 10:19:10 crc kubenswrapper[4663]: E1211 10:19:10.504023 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 10:19:10 crc kubenswrapper[4663]: I1211 10:19:10.580390 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:19:10 crc kubenswrapper[4663]: E1211 10:19:10.580584 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 10:19:10 crc kubenswrapper[4663]: E1211 10:19:10.580614 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 10:19:10 crc kubenswrapper[4663]: E1211 10:19:10.580628 4663 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 10:19:10 crc kubenswrapper[4663]: E1211 10:19:10.580679 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-11 10:20:14.580663576 +0000 UTC m=+145.398593360 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 10:19:10 crc kubenswrapper[4663]: I1211 10:19:10.597149 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:10 crc kubenswrapper[4663]: I1211 10:19:10.597212 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:10 crc kubenswrapper[4663]: I1211 10:19:10.597223 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:10 crc kubenswrapper[4663]: I1211 10:19:10.597240 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:10 crc kubenswrapper[4663]: I1211 10:19:10.597250 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:10Z","lastTransitionTime":"2025-12-11T10:19:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:10 crc kubenswrapper[4663]: I1211 10:19:10.699673 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:10 crc kubenswrapper[4663]: I1211 10:19:10.699708 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:10 crc kubenswrapper[4663]: I1211 10:19:10.699718 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:10 crc kubenswrapper[4663]: I1211 10:19:10.699731 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:10 crc kubenswrapper[4663]: I1211 10:19:10.699741 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:10Z","lastTransitionTime":"2025-12-11T10:19:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:10 crc kubenswrapper[4663]: I1211 10:19:10.801389 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:10 crc kubenswrapper[4663]: I1211 10:19:10.801420 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:10 crc kubenswrapper[4663]: I1211 10:19:10.801428 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:10 crc kubenswrapper[4663]: I1211 10:19:10.801440 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:10 crc kubenswrapper[4663]: I1211 10:19:10.801450 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:10Z","lastTransitionTime":"2025-12-11T10:19:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:10 crc kubenswrapper[4663]: I1211 10:19:10.903918 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:10 crc kubenswrapper[4663]: I1211 10:19:10.903957 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:10 crc kubenswrapper[4663]: I1211 10:19:10.903965 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:10 crc kubenswrapper[4663]: I1211 10:19:10.903979 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:10 crc kubenswrapper[4663]: I1211 10:19:10.903989 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:10Z","lastTransitionTime":"2025-12-11T10:19:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.006788 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.006826 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.006834 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.006847 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.006856 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:11Z","lastTransitionTime":"2025-12-11T10:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.010129 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.010153 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.010162 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.010172 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.010191 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:11Z","lastTransitionTime":"2025-12-11T10:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:11 crc kubenswrapper[4663]: E1211 10:19:11.021624 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a09a2675-5943-46f1-b4c2-67ff05e820a6\\\",\\\"systemUUID\\\":\\\"18e040fd-bd85-4c98-8f53-60800a512169\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:11Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.024635 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.024676 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.024685 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.024694 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.024702 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:11Z","lastTransitionTime":"2025-12-11T10:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:11 crc kubenswrapper[4663]: E1211 10:19:11.034331 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a09a2675-5943-46f1-b4c2-67ff05e820a6\\\",\\\"systemUUID\\\":\\\"18e040fd-bd85-4c98-8f53-60800a512169\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:11Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.037616 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.037657 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.037667 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.037682 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.037692 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:11Z","lastTransitionTime":"2025-12-11T10:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:11 crc kubenswrapper[4663]: E1211 10:19:11.047550 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a09a2675-5943-46f1-b4c2-67ff05e820a6\\\",\\\"systemUUID\\\":\\\"18e040fd-bd85-4c98-8f53-60800a512169\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:11Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.050151 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.050209 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.050221 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.050233 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.050243 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:11Z","lastTransitionTime":"2025-12-11T10:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:11 crc kubenswrapper[4663]: E1211 10:19:11.062106 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a09a2675-5943-46f1-b4c2-67ff05e820a6\\\",\\\"systemUUID\\\":\\\"18e040fd-bd85-4c98-8f53-60800a512169\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:11Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.065298 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.065334 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.065362 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.065377 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.065387 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:11Z","lastTransitionTime":"2025-12-11T10:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:11 crc kubenswrapper[4663]: E1211 10:19:11.074968 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a09a2675-5943-46f1-b4c2-67ff05e820a6\\\",\\\"systemUUID\\\":\\\"18e040fd-bd85-4c98-8f53-60800a512169\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:11Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:11 crc kubenswrapper[4663]: E1211 10:19:11.075118 4663 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.109521 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.109551 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.109560 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.109574 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.109582 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:11Z","lastTransitionTime":"2025-12-11T10:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.211864 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.211902 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.211912 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.211926 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.211935 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:11Z","lastTransitionTime":"2025-12-11T10:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.314449 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.314491 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.314499 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.314513 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.314524 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:11Z","lastTransitionTime":"2025-12-11T10:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.416982 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.417021 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.417030 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.417043 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.417053 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:11Z","lastTransitionTime":"2025-12-11T10:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.504112 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.504152 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:19:11 crc kubenswrapper[4663]: E1211 10:19:11.504313 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5r5q6" podUID="54c64914-8d10-48bc-a755-1f7257400a32" Dec 11 10:19:11 crc kubenswrapper[4663]: E1211 10:19:11.504478 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.518842 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.518871 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.518880 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.518894 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.518903 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:11Z","lastTransitionTime":"2025-12-11T10:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.621166 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.621231 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.621241 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.621257 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.621267 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:11Z","lastTransitionTime":"2025-12-11T10:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.723501 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.723573 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.723588 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.723618 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.723636 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:11Z","lastTransitionTime":"2025-12-11T10:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.826527 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.826582 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.826595 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.826611 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.826622 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:11Z","lastTransitionTime":"2025-12-11T10:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.928996 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.929039 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.929048 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.929062 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:11 crc kubenswrapper[4663]: I1211 10:19:11.929072 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:11Z","lastTransitionTime":"2025-12-11T10:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:12 crc kubenswrapper[4663]: I1211 10:19:12.031773 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:12 crc kubenswrapper[4663]: I1211 10:19:12.031808 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:12 crc kubenswrapper[4663]: I1211 10:19:12.031817 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:12 crc kubenswrapper[4663]: I1211 10:19:12.031831 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:12 crc kubenswrapper[4663]: I1211 10:19:12.031840 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:12Z","lastTransitionTime":"2025-12-11T10:19:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:12 crc kubenswrapper[4663]: I1211 10:19:12.133614 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:12 crc kubenswrapper[4663]: I1211 10:19:12.133647 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:12 crc kubenswrapper[4663]: I1211 10:19:12.133655 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:12 crc kubenswrapper[4663]: I1211 10:19:12.133669 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:12 crc kubenswrapper[4663]: I1211 10:19:12.133678 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:12Z","lastTransitionTime":"2025-12-11T10:19:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:12 crc kubenswrapper[4663]: I1211 10:19:12.235984 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:12 crc kubenswrapper[4663]: I1211 10:19:12.236039 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:12 crc kubenswrapper[4663]: I1211 10:19:12.236049 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:12 crc kubenswrapper[4663]: I1211 10:19:12.236064 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:12 crc kubenswrapper[4663]: I1211 10:19:12.236077 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:12Z","lastTransitionTime":"2025-12-11T10:19:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:12 crc kubenswrapper[4663]: I1211 10:19:12.339077 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:12 crc kubenswrapper[4663]: I1211 10:19:12.339118 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:12 crc kubenswrapper[4663]: I1211 10:19:12.339128 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:12 crc kubenswrapper[4663]: I1211 10:19:12.339142 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:12 crc kubenswrapper[4663]: I1211 10:19:12.339154 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:12Z","lastTransitionTime":"2025-12-11T10:19:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:12 crc kubenswrapper[4663]: I1211 10:19:12.441985 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:12 crc kubenswrapper[4663]: I1211 10:19:12.442029 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:12 crc kubenswrapper[4663]: I1211 10:19:12.442038 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:12 crc kubenswrapper[4663]: I1211 10:19:12.442052 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:12 crc kubenswrapper[4663]: I1211 10:19:12.442062 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:12Z","lastTransitionTime":"2025-12-11T10:19:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:12 crc kubenswrapper[4663]: I1211 10:19:12.504078 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:19:12 crc kubenswrapper[4663]: I1211 10:19:12.504125 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:19:12 crc kubenswrapper[4663]: E1211 10:19:12.504218 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 10:19:12 crc kubenswrapper[4663]: E1211 10:19:12.504369 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 10:19:12 crc kubenswrapper[4663]: I1211 10:19:12.543902 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:12 crc kubenswrapper[4663]: I1211 10:19:12.543931 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:12 crc kubenswrapper[4663]: I1211 10:19:12.543939 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:12 crc kubenswrapper[4663]: I1211 10:19:12.543950 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:12 crc kubenswrapper[4663]: I1211 10:19:12.543960 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:12Z","lastTransitionTime":"2025-12-11T10:19:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:12 crc kubenswrapper[4663]: I1211 10:19:12.646985 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:12 crc kubenswrapper[4663]: I1211 10:19:12.647038 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:12 crc kubenswrapper[4663]: I1211 10:19:12.647047 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:12 crc kubenswrapper[4663]: I1211 10:19:12.647067 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:12 crc kubenswrapper[4663]: I1211 10:19:12.647079 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:12Z","lastTransitionTime":"2025-12-11T10:19:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:12 crc kubenswrapper[4663]: I1211 10:19:12.749885 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:12 crc kubenswrapper[4663]: I1211 10:19:12.749927 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:12 crc kubenswrapper[4663]: I1211 10:19:12.749939 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:12 crc kubenswrapper[4663]: I1211 10:19:12.749957 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:12 crc kubenswrapper[4663]: I1211 10:19:12.749972 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:12Z","lastTransitionTime":"2025-12-11T10:19:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:12 crc kubenswrapper[4663]: I1211 10:19:12.852296 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:12 crc kubenswrapper[4663]: I1211 10:19:12.852334 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:12 crc kubenswrapper[4663]: I1211 10:19:12.852344 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:12 crc kubenswrapper[4663]: I1211 10:19:12.852359 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:12 crc kubenswrapper[4663]: I1211 10:19:12.852373 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:12Z","lastTransitionTime":"2025-12-11T10:19:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:12 crc kubenswrapper[4663]: I1211 10:19:12.953805 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:12 crc kubenswrapper[4663]: I1211 10:19:12.953839 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:12 crc kubenswrapper[4663]: I1211 10:19:12.953848 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:12 crc kubenswrapper[4663]: I1211 10:19:12.953861 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:12 crc kubenswrapper[4663]: I1211 10:19:12.953870 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:12Z","lastTransitionTime":"2025-12-11T10:19:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:13 crc kubenswrapper[4663]: I1211 10:19:13.055441 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:13 crc kubenswrapper[4663]: I1211 10:19:13.055471 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:13 crc kubenswrapper[4663]: I1211 10:19:13.055481 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:13 crc kubenswrapper[4663]: I1211 10:19:13.055495 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:13 crc kubenswrapper[4663]: I1211 10:19:13.055512 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:13Z","lastTransitionTime":"2025-12-11T10:19:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:13 crc kubenswrapper[4663]: I1211 10:19:13.157698 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:13 crc kubenswrapper[4663]: I1211 10:19:13.157765 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:13 crc kubenswrapper[4663]: I1211 10:19:13.157775 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:13 crc kubenswrapper[4663]: I1211 10:19:13.157795 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:13 crc kubenswrapper[4663]: I1211 10:19:13.157807 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:13Z","lastTransitionTime":"2025-12-11T10:19:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:13 crc kubenswrapper[4663]: I1211 10:19:13.259444 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:13 crc kubenswrapper[4663]: I1211 10:19:13.259490 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:13 crc kubenswrapper[4663]: I1211 10:19:13.259498 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:13 crc kubenswrapper[4663]: I1211 10:19:13.259533 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:13 crc kubenswrapper[4663]: I1211 10:19:13.259545 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:13Z","lastTransitionTime":"2025-12-11T10:19:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:13 crc kubenswrapper[4663]: I1211 10:19:13.361415 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:13 crc kubenswrapper[4663]: I1211 10:19:13.361476 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:13 crc kubenswrapper[4663]: I1211 10:19:13.361485 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:13 crc kubenswrapper[4663]: I1211 10:19:13.361498 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:13 crc kubenswrapper[4663]: I1211 10:19:13.361508 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:13Z","lastTransitionTime":"2025-12-11T10:19:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:13 crc kubenswrapper[4663]: I1211 10:19:13.464131 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:13 crc kubenswrapper[4663]: I1211 10:19:13.464170 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:13 crc kubenswrapper[4663]: I1211 10:19:13.464178 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:13 crc kubenswrapper[4663]: I1211 10:19:13.464213 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:13 crc kubenswrapper[4663]: I1211 10:19:13.464223 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:13Z","lastTransitionTime":"2025-12-11T10:19:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:13 crc kubenswrapper[4663]: I1211 10:19:13.504367 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:19:13 crc kubenswrapper[4663]: I1211 10:19:13.504436 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:19:13 crc kubenswrapper[4663]: E1211 10:19:13.504495 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 10:19:13 crc kubenswrapper[4663]: E1211 10:19:13.504586 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5r5q6" podUID="54c64914-8d10-48bc-a755-1f7257400a32" Dec 11 10:19:13 crc kubenswrapper[4663]: I1211 10:19:13.565792 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:13 crc kubenswrapper[4663]: I1211 10:19:13.565819 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:13 crc kubenswrapper[4663]: I1211 10:19:13.565827 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:13 crc kubenswrapper[4663]: I1211 10:19:13.565838 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:13 crc kubenswrapper[4663]: I1211 10:19:13.565848 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:13Z","lastTransitionTime":"2025-12-11T10:19:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:13 crc kubenswrapper[4663]: I1211 10:19:13.668135 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:13 crc kubenswrapper[4663]: I1211 10:19:13.668208 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:13 crc kubenswrapper[4663]: I1211 10:19:13.668220 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:13 crc kubenswrapper[4663]: I1211 10:19:13.668236 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:13 crc kubenswrapper[4663]: I1211 10:19:13.668247 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:13Z","lastTransitionTime":"2025-12-11T10:19:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:13 crc kubenswrapper[4663]: I1211 10:19:13.770446 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:13 crc kubenswrapper[4663]: I1211 10:19:13.770491 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:13 crc kubenswrapper[4663]: I1211 10:19:13.770500 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:13 crc kubenswrapper[4663]: I1211 10:19:13.770517 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:13 crc kubenswrapper[4663]: I1211 10:19:13.770529 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:13Z","lastTransitionTime":"2025-12-11T10:19:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:13 crc kubenswrapper[4663]: I1211 10:19:13.872514 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:13 crc kubenswrapper[4663]: I1211 10:19:13.872561 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:13 crc kubenswrapper[4663]: I1211 10:19:13.872570 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:13 crc kubenswrapper[4663]: I1211 10:19:13.872584 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:13 crc kubenswrapper[4663]: I1211 10:19:13.872593 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:13Z","lastTransitionTime":"2025-12-11T10:19:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:13 crc kubenswrapper[4663]: I1211 10:19:13.975148 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:13 crc kubenswrapper[4663]: I1211 10:19:13.975198 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:13 crc kubenswrapper[4663]: I1211 10:19:13.975207 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:13 crc kubenswrapper[4663]: I1211 10:19:13.975220 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:13 crc kubenswrapper[4663]: I1211 10:19:13.975230 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:13Z","lastTransitionTime":"2025-12-11T10:19:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:14 crc kubenswrapper[4663]: I1211 10:19:14.077160 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:14 crc kubenswrapper[4663]: I1211 10:19:14.077230 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:14 crc kubenswrapper[4663]: I1211 10:19:14.077241 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:14 crc kubenswrapper[4663]: I1211 10:19:14.077256 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:14 crc kubenswrapper[4663]: I1211 10:19:14.077265 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:14Z","lastTransitionTime":"2025-12-11T10:19:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:14 crc kubenswrapper[4663]: I1211 10:19:14.179254 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:14 crc kubenswrapper[4663]: I1211 10:19:14.179319 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:14 crc kubenswrapper[4663]: I1211 10:19:14.179333 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:14 crc kubenswrapper[4663]: I1211 10:19:14.179346 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:14 crc kubenswrapper[4663]: I1211 10:19:14.179356 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:14Z","lastTransitionTime":"2025-12-11T10:19:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:14 crc kubenswrapper[4663]: I1211 10:19:14.281627 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:14 crc kubenswrapper[4663]: I1211 10:19:14.281664 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:14 crc kubenswrapper[4663]: I1211 10:19:14.281674 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:14 crc kubenswrapper[4663]: I1211 10:19:14.281686 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:14 crc kubenswrapper[4663]: I1211 10:19:14.281695 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:14Z","lastTransitionTime":"2025-12-11T10:19:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:14 crc kubenswrapper[4663]: I1211 10:19:14.384080 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:14 crc kubenswrapper[4663]: I1211 10:19:14.384109 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:14 crc kubenswrapper[4663]: I1211 10:19:14.384117 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:14 crc kubenswrapper[4663]: I1211 10:19:14.384127 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:14 crc kubenswrapper[4663]: I1211 10:19:14.384135 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:14Z","lastTransitionTime":"2025-12-11T10:19:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:14 crc kubenswrapper[4663]: I1211 10:19:14.486148 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:14 crc kubenswrapper[4663]: I1211 10:19:14.486204 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:14 crc kubenswrapper[4663]: I1211 10:19:14.486215 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:14 crc kubenswrapper[4663]: I1211 10:19:14.486229 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:14 crc kubenswrapper[4663]: I1211 10:19:14.486239 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:14Z","lastTransitionTime":"2025-12-11T10:19:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:14 crc kubenswrapper[4663]: I1211 10:19:14.504424 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:19:14 crc kubenswrapper[4663]: I1211 10:19:14.504447 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:19:14 crc kubenswrapper[4663]: E1211 10:19:14.504531 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 10:19:14 crc kubenswrapper[4663]: E1211 10:19:14.504591 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 10:19:14 crc kubenswrapper[4663]: I1211 10:19:14.588118 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:14 crc kubenswrapper[4663]: I1211 10:19:14.588156 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:14 crc kubenswrapper[4663]: I1211 10:19:14.588166 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:14 crc kubenswrapper[4663]: I1211 10:19:14.588203 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:14 crc kubenswrapper[4663]: I1211 10:19:14.588215 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:14Z","lastTransitionTime":"2025-12-11T10:19:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:14 crc kubenswrapper[4663]: I1211 10:19:14.690893 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:14 crc kubenswrapper[4663]: I1211 10:19:14.690934 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:14 crc kubenswrapper[4663]: I1211 10:19:14.690943 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:14 crc kubenswrapper[4663]: I1211 10:19:14.690961 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:14 crc kubenswrapper[4663]: I1211 10:19:14.690970 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:14Z","lastTransitionTime":"2025-12-11T10:19:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:14 crc kubenswrapper[4663]: I1211 10:19:14.792990 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:14 crc kubenswrapper[4663]: I1211 10:19:14.793029 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:14 crc kubenswrapper[4663]: I1211 10:19:14.793042 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:14 crc kubenswrapper[4663]: I1211 10:19:14.793055 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:14 crc kubenswrapper[4663]: I1211 10:19:14.793067 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:14Z","lastTransitionTime":"2025-12-11T10:19:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:14 crc kubenswrapper[4663]: I1211 10:19:14.895499 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:14 crc kubenswrapper[4663]: I1211 10:19:14.895542 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:14 crc kubenswrapper[4663]: I1211 10:19:14.895552 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:14 crc kubenswrapper[4663]: I1211 10:19:14.895567 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:14 crc kubenswrapper[4663]: I1211 10:19:14.895579 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:14Z","lastTransitionTime":"2025-12-11T10:19:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:14 crc kubenswrapper[4663]: I1211 10:19:14.998176 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:14 crc kubenswrapper[4663]: I1211 10:19:14.998233 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:14 crc kubenswrapper[4663]: I1211 10:19:14.998242 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:14 crc kubenswrapper[4663]: I1211 10:19:14.998256 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:14 crc kubenswrapper[4663]: I1211 10:19:14.998266 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:14Z","lastTransitionTime":"2025-12-11T10:19:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:15 crc kubenswrapper[4663]: I1211 10:19:15.099943 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:15 crc kubenswrapper[4663]: I1211 10:19:15.099982 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:15 crc kubenswrapper[4663]: I1211 10:19:15.099990 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:15 crc kubenswrapper[4663]: I1211 10:19:15.100002 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:15 crc kubenswrapper[4663]: I1211 10:19:15.100012 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:15Z","lastTransitionTime":"2025-12-11T10:19:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:15 crc kubenswrapper[4663]: I1211 10:19:15.202065 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:15 crc kubenswrapper[4663]: I1211 10:19:15.202104 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:15 crc kubenswrapper[4663]: I1211 10:19:15.202113 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:15 crc kubenswrapper[4663]: I1211 10:19:15.202126 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:15 crc kubenswrapper[4663]: I1211 10:19:15.202138 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:15Z","lastTransitionTime":"2025-12-11T10:19:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:15 crc kubenswrapper[4663]: I1211 10:19:15.304472 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:15 crc kubenswrapper[4663]: I1211 10:19:15.304504 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:15 crc kubenswrapper[4663]: I1211 10:19:15.304512 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:15 crc kubenswrapper[4663]: I1211 10:19:15.304525 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:15 crc kubenswrapper[4663]: I1211 10:19:15.304533 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:15Z","lastTransitionTime":"2025-12-11T10:19:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:15 crc kubenswrapper[4663]: I1211 10:19:15.407049 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:15 crc kubenswrapper[4663]: I1211 10:19:15.407087 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:15 crc kubenswrapper[4663]: I1211 10:19:15.407095 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:15 crc kubenswrapper[4663]: I1211 10:19:15.407115 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:15 crc kubenswrapper[4663]: I1211 10:19:15.407126 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:15Z","lastTransitionTime":"2025-12-11T10:19:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:15 crc kubenswrapper[4663]: I1211 10:19:15.504340 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:19:15 crc kubenswrapper[4663]: I1211 10:19:15.504348 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:19:15 crc kubenswrapper[4663]: E1211 10:19:15.504501 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 10:19:15 crc kubenswrapper[4663]: E1211 10:19:15.504532 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-5r5q6" podUID="54c64914-8d10-48bc-a755-1f7257400a32" Dec 11 10:19:15 crc kubenswrapper[4663]: I1211 10:19:15.508494 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:15 crc kubenswrapper[4663]: I1211 10:19:15.508515 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:15 crc kubenswrapper[4663]: I1211 10:19:15.508524 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:15 crc kubenswrapper[4663]: I1211 10:19:15.508537 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:15 crc kubenswrapper[4663]: I1211 10:19:15.508545 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:15Z","lastTransitionTime":"2025-12-11T10:19:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:15 crc kubenswrapper[4663]: I1211 10:19:15.611334 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:15 crc kubenswrapper[4663]: I1211 10:19:15.611368 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:15 crc kubenswrapper[4663]: I1211 10:19:15.611376 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:15 crc kubenswrapper[4663]: I1211 10:19:15.611388 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:15 crc kubenswrapper[4663]: I1211 10:19:15.611398 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:15Z","lastTransitionTime":"2025-12-11T10:19:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:15 crc kubenswrapper[4663]: I1211 10:19:15.713550 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:15 crc kubenswrapper[4663]: I1211 10:19:15.713581 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:15 crc kubenswrapper[4663]: I1211 10:19:15.713591 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:15 crc kubenswrapper[4663]: I1211 10:19:15.713602 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:15 crc kubenswrapper[4663]: I1211 10:19:15.713612 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:15Z","lastTransitionTime":"2025-12-11T10:19:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:15 crc kubenswrapper[4663]: I1211 10:19:15.815885 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:15 crc kubenswrapper[4663]: I1211 10:19:15.815951 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:15 crc kubenswrapper[4663]: I1211 10:19:15.815960 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:15 crc kubenswrapper[4663]: I1211 10:19:15.815973 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:15 crc kubenswrapper[4663]: I1211 10:19:15.816019 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:15Z","lastTransitionTime":"2025-12-11T10:19:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:15 crc kubenswrapper[4663]: I1211 10:19:15.917745 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:15 crc kubenswrapper[4663]: I1211 10:19:15.917787 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:15 crc kubenswrapper[4663]: I1211 10:19:15.917796 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:15 crc kubenswrapper[4663]: I1211 10:19:15.917813 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:15 crc kubenswrapper[4663]: I1211 10:19:15.917822 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:15Z","lastTransitionTime":"2025-12-11T10:19:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:16 crc kubenswrapper[4663]: I1211 10:19:16.019741 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:16 crc kubenswrapper[4663]: I1211 10:19:16.019763 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:16 crc kubenswrapper[4663]: I1211 10:19:16.019771 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:16 crc kubenswrapper[4663]: I1211 10:19:16.019781 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:16 crc kubenswrapper[4663]: I1211 10:19:16.019789 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:16Z","lastTransitionTime":"2025-12-11T10:19:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:16 crc kubenswrapper[4663]: I1211 10:19:16.121469 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:16 crc kubenswrapper[4663]: I1211 10:19:16.121500 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:16 crc kubenswrapper[4663]: I1211 10:19:16.121510 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:16 crc kubenswrapper[4663]: I1211 10:19:16.121522 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:16 crc kubenswrapper[4663]: I1211 10:19:16.121530 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:16Z","lastTransitionTime":"2025-12-11T10:19:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:16 crc kubenswrapper[4663]: I1211 10:19:16.223238 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:16 crc kubenswrapper[4663]: I1211 10:19:16.223278 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:16 crc kubenswrapper[4663]: I1211 10:19:16.223286 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:16 crc kubenswrapper[4663]: I1211 10:19:16.223297 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:16 crc kubenswrapper[4663]: I1211 10:19:16.223304 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:16Z","lastTransitionTime":"2025-12-11T10:19:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:16 crc kubenswrapper[4663]: I1211 10:19:16.325737 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:16 crc kubenswrapper[4663]: I1211 10:19:16.325769 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:16 crc kubenswrapper[4663]: I1211 10:19:16.325777 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:16 crc kubenswrapper[4663]: I1211 10:19:16.325789 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:16 crc kubenswrapper[4663]: I1211 10:19:16.325798 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:16Z","lastTransitionTime":"2025-12-11T10:19:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:16 crc kubenswrapper[4663]: I1211 10:19:16.427533 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:16 crc kubenswrapper[4663]: I1211 10:19:16.427565 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:16 crc kubenswrapper[4663]: I1211 10:19:16.427575 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:16 crc kubenswrapper[4663]: I1211 10:19:16.427586 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:16 crc kubenswrapper[4663]: I1211 10:19:16.427595 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:16Z","lastTransitionTime":"2025-12-11T10:19:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:16 crc kubenswrapper[4663]: I1211 10:19:16.503947 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:19:16 crc kubenswrapper[4663]: I1211 10:19:16.503947 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:19:16 crc kubenswrapper[4663]: E1211 10:19:16.504334 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 10:19:16 crc kubenswrapper[4663]: E1211 10:19:16.504378 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 10:19:16 crc kubenswrapper[4663]: I1211 10:19:16.504579 4663 scope.go:117] "RemoveContainer" containerID="0963c6c1df39ea1ae72e428e60b76ab3c5677d293f2fc2dd2f19bf3807a0dd6e" Dec 11 10:19:16 crc kubenswrapper[4663]: E1211 10:19:16.504708 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-z7882_openshift-ovn-kubernetes(4f739f0f-4546-4db5-809d-435823288bd8)\"" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" podUID="4f739f0f-4546-4db5-809d-435823288bd8" Dec 11 10:19:16 crc kubenswrapper[4663]: I1211 10:19:16.529980 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:16 crc kubenswrapper[4663]: I1211 10:19:16.530005 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:16 crc kubenswrapper[4663]: I1211 10:19:16.530014 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:16 crc kubenswrapper[4663]: I1211 10:19:16.530024 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:16 crc kubenswrapper[4663]: I1211 10:19:16.530040 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:16Z","lastTransitionTime":"2025-12-11T10:19:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:16 crc kubenswrapper[4663]: I1211 10:19:16.632382 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:16 crc kubenswrapper[4663]: I1211 10:19:16.632421 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:16 crc kubenswrapper[4663]: I1211 10:19:16.632430 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:16 crc kubenswrapper[4663]: I1211 10:19:16.632441 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:16 crc kubenswrapper[4663]: I1211 10:19:16.632450 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:16Z","lastTransitionTime":"2025-12-11T10:19:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:16 crc kubenswrapper[4663]: I1211 10:19:16.734769 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:16 crc kubenswrapper[4663]: I1211 10:19:16.734819 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:16 crc kubenswrapper[4663]: I1211 10:19:16.734828 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:16 crc kubenswrapper[4663]: I1211 10:19:16.734846 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:16 crc kubenswrapper[4663]: I1211 10:19:16.734862 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:16Z","lastTransitionTime":"2025-12-11T10:19:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:16 crc kubenswrapper[4663]: I1211 10:19:16.836426 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:16 crc kubenswrapper[4663]: I1211 10:19:16.836460 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:16 crc kubenswrapper[4663]: I1211 10:19:16.836468 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:16 crc kubenswrapper[4663]: I1211 10:19:16.836482 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:16 crc kubenswrapper[4663]: I1211 10:19:16.836492 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:16Z","lastTransitionTime":"2025-12-11T10:19:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:16 crc kubenswrapper[4663]: I1211 10:19:16.937984 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:16 crc kubenswrapper[4663]: I1211 10:19:16.938020 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:16 crc kubenswrapper[4663]: I1211 10:19:16.938028 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:16 crc kubenswrapper[4663]: I1211 10:19:16.938043 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:16 crc kubenswrapper[4663]: I1211 10:19:16.938052 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:16Z","lastTransitionTime":"2025-12-11T10:19:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:17 crc kubenswrapper[4663]: I1211 10:19:17.040742 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:17 crc kubenswrapper[4663]: I1211 10:19:17.041078 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:17 crc kubenswrapper[4663]: I1211 10:19:17.041144 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:17 crc kubenswrapper[4663]: I1211 10:19:17.041226 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:17 crc kubenswrapper[4663]: I1211 10:19:17.041310 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:17Z","lastTransitionTime":"2025-12-11T10:19:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:17 crc kubenswrapper[4663]: I1211 10:19:17.143799 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:17 crc kubenswrapper[4663]: I1211 10:19:17.143840 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:17 crc kubenswrapper[4663]: I1211 10:19:17.143850 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:17 crc kubenswrapper[4663]: I1211 10:19:17.143865 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:17 crc kubenswrapper[4663]: I1211 10:19:17.143873 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:17Z","lastTransitionTime":"2025-12-11T10:19:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:17 crc kubenswrapper[4663]: I1211 10:19:17.245564 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:17 crc kubenswrapper[4663]: I1211 10:19:17.245592 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:17 crc kubenswrapper[4663]: I1211 10:19:17.245601 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:17 crc kubenswrapper[4663]: I1211 10:19:17.245614 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:17 crc kubenswrapper[4663]: I1211 10:19:17.245624 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:17Z","lastTransitionTime":"2025-12-11T10:19:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:17 crc kubenswrapper[4663]: I1211 10:19:17.347935 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:17 crc kubenswrapper[4663]: I1211 10:19:17.347993 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:17 crc kubenswrapper[4663]: I1211 10:19:17.348006 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:17 crc kubenswrapper[4663]: I1211 10:19:17.348022 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:17 crc kubenswrapper[4663]: I1211 10:19:17.348033 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:17Z","lastTransitionTime":"2025-12-11T10:19:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:17 crc kubenswrapper[4663]: I1211 10:19:17.449782 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:17 crc kubenswrapper[4663]: I1211 10:19:17.450106 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:17 crc kubenswrapper[4663]: I1211 10:19:17.450213 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:17 crc kubenswrapper[4663]: I1211 10:19:17.450298 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:17 crc kubenswrapper[4663]: I1211 10:19:17.450372 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:17Z","lastTransitionTime":"2025-12-11T10:19:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:17 crc kubenswrapper[4663]: I1211 10:19:17.504626 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:19:17 crc kubenswrapper[4663]: I1211 10:19:17.504691 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:19:17 crc kubenswrapper[4663]: E1211 10:19:17.504997 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-5r5q6" podUID="54c64914-8d10-48bc-a755-1f7257400a32" Dec 11 10:19:17 crc kubenswrapper[4663]: E1211 10:19:17.505156 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 10:19:17 crc kubenswrapper[4663]: I1211 10:19:17.552922 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:17 crc kubenswrapper[4663]: I1211 10:19:17.552953 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:17 crc kubenswrapper[4663]: I1211 10:19:17.552961 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:17 crc kubenswrapper[4663]: I1211 10:19:17.552973 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:17 crc kubenswrapper[4663]: I1211 10:19:17.552981 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:17Z","lastTransitionTime":"2025-12-11T10:19:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:17 crc kubenswrapper[4663]: I1211 10:19:17.654888 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:17 crc kubenswrapper[4663]: I1211 10:19:17.654921 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:17 crc kubenswrapper[4663]: I1211 10:19:17.654929 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:17 crc kubenswrapper[4663]: I1211 10:19:17.654957 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:17 crc kubenswrapper[4663]: I1211 10:19:17.654967 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:17Z","lastTransitionTime":"2025-12-11T10:19:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:17 crc kubenswrapper[4663]: I1211 10:19:17.756885 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:17 crc kubenswrapper[4663]: I1211 10:19:17.756917 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:17 crc kubenswrapper[4663]: I1211 10:19:17.756924 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:17 crc kubenswrapper[4663]: I1211 10:19:17.756937 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:17 crc kubenswrapper[4663]: I1211 10:19:17.756945 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:17Z","lastTransitionTime":"2025-12-11T10:19:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:17 crc kubenswrapper[4663]: I1211 10:19:17.858660 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:17 crc kubenswrapper[4663]: I1211 10:19:17.858694 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:17 crc kubenswrapper[4663]: I1211 10:19:17.858704 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:17 crc kubenswrapper[4663]: I1211 10:19:17.858717 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:17 crc kubenswrapper[4663]: I1211 10:19:17.858726 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:17Z","lastTransitionTime":"2025-12-11T10:19:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:17 crc kubenswrapper[4663]: I1211 10:19:17.960748 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:17 crc kubenswrapper[4663]: I1211 10:19:17.960781 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:17 crc kubenswrapper[4663]: I1211 10:19:17.960790 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:17 crc kubenswrapper[4663]: I1211 10:19:17.960803 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:17 crc kubenswrapper[4663]: I1211 10:19:17.960811 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:17Z","lastTransitionTime":"2025-12-11T10:19:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:18 crc kubenswrapper[4663]: I1211 10:19:18.062564 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:18 crc kubenswrapper[4663]: I1211 10:19:18.062628 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:18 crc kubenswrapper[4663]: I1211 10:19:18.062637 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:18 crc kubenswrapper[4663]: I1211 10:19:18.062648 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:18 crc kubenswrapper[4663]: I1211 10:19:18.062657 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:18Z","lastTransitionTime":"2025-12-11T10:19:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:18 crc kubenswrapper[4663]: I1211 10:19:18.164626 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:18 crc kubenswrapper[4663]: I1211 10:19:18.164666 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:18 crc kubenswrapper[4663]: I1211 10:19:18.164674 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:18 crc kubenswrapper[4663]: I1211 10:19:18.164686 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:18 crc kubenswrapper[4663]: I1211 10:19:18.164697 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:18Z","lastTransitionTime":"2025-12-11T10:19:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:18 crc kubenswrapper[4663]: I1211 10:19:18.266423 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:18 crc kubenswrapper[4663]: I1211 10:19:18.266456 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:18 crc kubenswrapper[4663]: I1211 10:19:18.266464 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:18 crc kubenswrapper[4663]: I1211 10:19:18.266475 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:18 crc kubenswrapper[4663]: I1211 10:19:18.266483 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:18Z","lastTransitionTime":"2025-12-11T10:19:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:18 crc kubenswrapper[4663]: I1211 10:19:18.367894 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:18 crc kubenswrapper[4663]: I1211 10:19:18.367923 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:18 crc kubenswrapper[4663]: I1211 10:19:18.367931 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:18 crc kubenswrapper[4663]: I1211 10:19:18.367942 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:18 crc kubenswrapper[4663]: I1211 10:19:18.367951 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:18Z","lastTransitionTime":"2025-12-11T10:19:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:18 crc kubenswrapper[4663]: I1211 10:19:18.469924 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:18 crc kubenswrapper[4663]: I1211 10:19:18.469962 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:18 crc kubenswrapper[4663]: I1211 10:19:18.469971 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:18 crc kubenswrapper[4663]: I1211 10:19:18.469984 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:18 crc kubenswrapper[4663]: I1211 10:19:18.469994 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:18Z","lastTransitionTime":"2025-12-11T10:19:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:18 crc kubenswrapper[4663]: I1211 10:19:18.504392 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:19:18 crc kubenswrapper[4663]: I1211 10:19:18.504423 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:19:18 crc kubenswrapper[4663]: E1211 10:19:18.504713 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 10:19:18 crc kubenswrapper[4663]: E1211 10:19:18.504891 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 10:19:18 crc kubenswrapper[4663]: I1211 10:19:18.572225 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:18 crc kubenswrapper[4663]: I1211 10:19:18.572431 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:18 crc kubenswrapper[4663]: I1211 10:19:18.572495 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:18 crc kubenswrapper[4663]: I1211 10:19:18.572557 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:18 crc kubenswrapper[4663]: I1211 10:19:18.572651 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:18Z","lastTransitionTime":"2025-12-11T10:19:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:18 crc kubenswrapper[4663]: I1211 10:19:18.674203 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:18 crc kubenswrapper[4663]: I1211 10:19:18.674230 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:18 crc kubenswrapper[4663]: I1211 10:19:18.674239 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:18 crc kubenswrapper[4663]: I1211 10:19:18.674251 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:18 crc kubenswrapper[4663]: I1211 10:19:18.674260 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:18Z","lastTransitionTime":"2025-12-11T10:19:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:18 crc kubenswrapper[4663]: I1211 10:19:18.776721 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:18 crc kubenswrapper[4663]: I1211 10:19:18.776752 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:18 crc kubenswrapper[4663]: I1211 10:19:18.776761 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:18 crc kubenswrapper[4663]: I1211 10:19:18.776773 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:18 crc kubenswrapper[4663]: I1211 10:19:18.776783 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:18Z","lastTransitionTime":"2025-12-11T10:19:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:18 crc kubenswrapper[4663]: I1211 10:19:18.878517 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:18 crc kubenswrapper[4663]: I1211 10:19:18.878547 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:18 crc kubenswrapper[4663]: I1211 10:19:18.878555 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:18 crc kubenswrapper[4663]: I1211 10:19:18.878567 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:18 crc kubenswrapper[4663]: I1211 10:19:18.878577 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:18Z","lastTransitionTime":"2025-12-11T10:19:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:18 crc kubenswrapper[4663]: I1211 10:19:18.980380 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:18 crc kubenswrapper[4663]: I1211 10:19:18.980405 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:18 crc kubenswrapper[4663]: I1211 10:19:18.980413 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:18 crc kubenswrapper[4663]: I1211 10:19:18.980423 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:18 crc kubenswrapper[4663]: I1211 10:19:18.980431 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:18Z","lastTransitionTime":"2025-12-11T10:19:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.081928 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.081951 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.081959 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.081969 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.081975 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:19Z","lastTransitionTime":"2025-12-11T10:19:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.183686 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.183715 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.183723 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.183734 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.183740 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:19Z","lastTransitionTime":"2025-12-11T10:19:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.285974 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.285996 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.286003 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.286014 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.286021 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:19Z","lastTransitionTime":"2025-12-11T10:19:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.388357 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.388384 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.388393 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.388404 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.388412 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:19Z","lastTransitionTime":"2025-12-11T10:19:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.489872 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.489901 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.489910 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.489920 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.489927 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:19Z","lastTransitionTime":"2025-12-11T10:19:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.504299 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.504325 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:19:19 crc kubenswrapper[4663]: E1211 10:19:19.504395 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-5r5q6" podUID="54c64914-8d10-48bc-a755-1f7257400a32" Dec 11 10:19:19 crc kubenswrapper[4663]: E1211 10:19:19.504441 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.516768 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b934597e-ac43-4d21-82b1-c8d4260800e6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba7f98a60c12531c6766887ffac909316494552cc1acb036c5609e6400ac5b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d9bf7e6f64790700d3765c1935fa7eb13fa839a96617117bb226a8b86090fe9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83cef2539e49e0251b7cdab06b7a9002344f7ee41150f84b3f648372b4d50b55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1
ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a89c5b395c04b94c4476db7bf1edcfe175fb552dbbf4145a7031c0f86e139e9f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0068e2981a8c781e9563c1714bf0be7f35b5510f1fd7e52ab4cc83a0c0ec0c0e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"le observer\\\\nW1211 10:18:06.233120 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1211 10:18:06.233258 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 10:18:06.234798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2924789290/tls.crt::/tmp/serving-cert-2924789290/tls.key\\\\\\\"\\\\nI1211 10:18:06.570830 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 10:18:06.573023 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 10:18:06.573049 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 10:18:06.573076 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 10:18:06.573081 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 10:18:06.580393 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1211 10:18:06.580415 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580420 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 10:18:06.580424 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 10:18:06.580426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 10:18:06.580429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 10:18:06.580432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1211 10:18:06.580522 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1211 10:18:06.581348 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1a0dcfabc032353060f8cb94fc8c601da6b20a3e2c35b84229bf66c9256537\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:19Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.524452 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9e980e59ce0e172d37122fdae971fb0123940795827553f646ae1a6283f523\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:19Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.536328 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f739f0f-4546-4db5-809d-435823288bd8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84fd59d33b64d1211bfe2754377534ed00ca078356eeb674fcd5d2d25761500b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0963c6c1df39ea1ae72e428e60b76ab3c5677d29
3f2fc2dd2f19bf3807a0dd6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0963c6c1df39ea1ae72e428e60b76ab3c5677d293f2fc2dd2f19bf3807a0dd6e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T10:19:02Z\\\",\\\"message\\\":\\\"ent-go/informers/factory.go:160\\\\nI1211 10:19:02.168532 6725 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1211 10:19:02.168730 6725 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1211 10:19:02.168789 6725 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1211 10:19:02.168889 6725 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1211 10:19:02.168794 6725 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1211 10:19:02.169519 6725 factory.go:656] Stopping watch factory\\\\nI1211 10:19:02.189222 6725 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI1211 10:19:02.189233 6725 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI1211 10:19:02.189265 6725 ovnkube.go:599] Stopped ovnkube\\\\nI1211 10:19:02.189287 6725 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1211 10:19:02.189346 6725 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:19:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-z7882_openshift-ovn-kubernetes(4f739f0f-4546-4db5-809d-435823288bd8)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lxx7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-z7882\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:19Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.543113 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11443ad0-297e-48c9-9c1e-3771584468ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c487fbed4984533659f3e1f3a79783d12b4e95706a529dc9cd24abb62adda34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-997ts
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d93647b4b0d3f4391ca6b565072c59f3f32be1433b5a043e3bb93395d529ebc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-997ts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:19Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hbks6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:19Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.550362 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:19Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.556502 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8f5qw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7cf8637-c767-47eb-8d38-578340067566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3440496709e41014f785c5aadeba1bae72c4b733779039f7508fa2ca78144537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-646rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8f5qw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:19Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.563102 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98941020-2764-490d-81db-9decd21aaa13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a72fbeafe047b24c15eb96a00eacf7ff119486159cd9cf32f63af409aecc529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed2238a7f3178a2c325e0f77a94934af6c907c13d0340e7f4c67b744669b56b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46t4z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mq524\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:19Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.570626 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-qv7jl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43cbb02f-aa8f-41aa-bd0a-8042aca3002c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60bc0fe12622361f71172b9c28087be13fe7c722952d22af2982c439d19c7a73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb85226c32cc03f4667a27f4596ce501400b323c88b347baab7574fe8f9e056c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T10:18:53Z\\\",\\\"message\\\":\\\"2025-12-11T10:18:08+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_5664ca76-8f3b-4c41-a819-a70f2bce86e2\\\\n2025-12-11T10:18:08+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_5664ca76-8f3b-4c41-a819-a70f2bce86e2 to /host/opt/cni/bin/\\\\n2025-12-11T10:18:08Z [verbose] multus-daemon started\\\\n2025-12-11T10:18:08Z [verbose] Readiness Indicator file check\\\\n2025-12-11T10:18:53Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4c2nm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-qv7jl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:19Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.577480 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5r5q6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"54c64914-8d10-48bc-a755-1f7257400a32\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"message\\\":\\\"containers with 
unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8wsjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8wsjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:20Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5r5q6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:19Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.585507 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dc8119f8882cfe2688a3b38b4746f6f9f75764080053fae1f19edd7880464e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de08980167134ace33e7d6a626c4ee66e733e247e6e8d0f7e969105c12f41580\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:19Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.591366 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.591397 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.591407 4663 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.591420 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.591428 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:19Z","lastTransitionTime":"2025-12-11T10:19:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.594027 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8fc3491f01b999c50e166f30aef0a1c9d95bc6935a4b68cdac9bfbb70bce980\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:19Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.601734 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:19Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.609526 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:19Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.619649 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4g9zj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27da1f1a-7a92-4c81-9889-21b44ea62db0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcf1553f9e95b87867fdf90f869863968036e47ec536a526cae63020cc9119fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"container
ID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3e3ac4c15dbb267098a6aebee0e6674acad0d1daf9c47b795cbd03e47c4e77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c7d3aa6b65793f7687f0314843306ee266cdd8fa9a71246dea0b11273b1f69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d272d8e115f1194ea8d2b5da27b60942b98d8bc002d56b223298a1a5bc80834\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:09Z\\\"}},\\\"volumeMo
unts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b7dfcfeae35497b38412561a952adf6d955c3e6fc071c0d380197d53e26c604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef7bb22e51b47e9e49687153801d9a61be88291a5c4ae94f0d6c779c1eb779c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef7bb22e51b47e9e49687153801d9a61be88291a5c4ae94f0d6c779c1eb779c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28edb9dba08d7e8cccab85d649f75ce2c946ff8be7bb2de39e02432156e2ccf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true
,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28edb9dba08d7e8cccab85d649f75ce2c946ff8be7bb2de39e02432156e2ccf8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:18:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:18:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dllpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:07Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4g9zj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:19Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.626606 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8lh4c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0724e8ea-646f-422e-bc21-39189bddba17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14a19c708bdceab221701a9e228b35336aa133a021ee7a088800e9bec694e175\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:18:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4cn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.1
1\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:18:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8lh4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:19Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.635091 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"82107b1a-b827-4c4a-b133-ce7e8ed73963\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf6437a10b018ecd7f7c1a23eb664377818d86ae958710d078a245878cd7d5a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52c5a9fef9792d964d1b4505a0e5857652454e088fbc8293524f644afd044507\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed406469f2fa49f8100c4c2c4adf80a173b1e5c8ab8581a4dc8c4d93fd557c7b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b8279948
8ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b28b0a79fd6ef3045b5417a53b9e665faea306044a08b64362f5f573221dea8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:19Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.641928 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ed97342-45c6-471f-97f2-7dc6ca0d0e63\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06faf32fe966c534260438465b3da92def0fd10908c333aa992567764401d34a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d781e969fff35a9312c0d9cb5e96a751741064ad0bae9e814ceaf39220f7500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d781e969fff35a9312c0d9cb5e96a751741064ad0bae9e814ceaf39220f7500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:19Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.649613 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8746ac1c-332c-475c-bf39-1c4dbd584637\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:18:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T10:17:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dbc58fcfdfe62d2170360ef7d64580a71e7d25d21800257757b80a2bccff9a5e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d29ef8426056a5691774eaa72cd716c08a424df3a0b426e1d7897d839230622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://625fddf0941d4a6bb39e6e05fa5888872b1e22f59fde1ffcbcb9990def6a4194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T10:17:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://977a2d3f94cdea486681aa43acfca631266808e41aeb1af091aa92400821ed37\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://977a2d3f94cdea486681aa43acfca631266808e41aeb1af091aa92400821ed37\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T10:17:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T10:17:50Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T10:17:49Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:19Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.693836 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.693884 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.693894 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.693908 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.693919 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:19Z","lastTransitionTime":"2025-12-11T10:19:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.795751 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.795780 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.795787 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.795802 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.795811 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:19Z","lastTransitionTime":"2025-12-11T10:19:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.898340 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.898387 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.898398 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.898415 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:19 crc kubenswrapper[4663]: I1211 10:19:19.898426 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:19Z","lastTransitionTime":"2025-12-11T10:19:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:20 crc kubenswrapper[4663]: I1211 10:19:20.000452 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:20 crc kubenswrapper[4663]: I1211 10:19:20.000594 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:20 crc kubenswrapper[4663]: I1211 10:19:20.000672 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:20 crc kubenswrapper[4663]: I1211 10:19:20.000769 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:20 crc kubenswrapper[4663]: I1211 10:19:20.000831 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:20Z","lastTransitionTime":"2025-12-11T10:19:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:20 crc kubenswrapper[4663]: I1211 10:19:20.102969 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:20 crc kubenswrapper[4663]: I1211 10:19:20.103116 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:20 crc kubenswrapper[4663]: I1211 10:19:20.103201 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:20 crc kubenswrapper[4663]: I1211 10:19:20.103262 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:20 crc kubenswrapper[4663]: I1211 10:19:20.103342 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:20Z","lastTransitionTime":"2025-12-11T10:19:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:20 crc kubenswrapper[4663]: I1211 10:19:20.205320 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:20 crc kubenswrapper[4663]: I1211 10:19:20.205355 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:20 crc kubenswrapper[4663]: I1211 10:19:20.205364 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:20 crc kubenswrapper[4663]: I1211 10:19:20.205378 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:20 crc kubenswrapper[4663]: I1211 10:19:20.205388 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:20Z","lastTransitionTime":"2025-12-11T10:19:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:20 crc kubenswrapper[4663]: I1211 10:19:20.307396 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:20 crc kubenswrapper[4663]: I1211 10:19:20.307447 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:20 crc kubenswrapper[4663]: I1211 10:19:20.307457 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:20 crc kubenswrapper[4663]: I1211 10:19:20.307470 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:20 crc kubenswrapper[4663]: I1211 10:19:20.307481 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:20Z","lastTransitionTime":"2025-12-11T10:19:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:20 crc kubenswrapper[4663]: I1211 10:19:20.409818 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:20 crc kubenswrapper[4663]: I1211 10:19:20.409876 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:20 crc kubenswrapper[4663]: I1211 10:19:20.409885 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:20 crc kubenswrapper[4663]: I1211 10:19:20.409898 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:20 crc kubenswrapper[4663]: I1211 10:19:20.409907 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:20Z","lastTransitionTime":"2025-12-11T10:19:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:20 crc kubenswrapper[4663]: I1211 10:19:20.503614 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:19:20 crc kubenswrapper[4663]: I1211 10:19:20.503688 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:19:20 crc kubenswrapper[4663]: E1211 10:19:20.503747 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 10:19:20 crc kubenswrapper[4663]: E1211 10:19:20.503852 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 10:19:20 crc kubenswrapper[4663]: I1211 10:19:20.512009 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:20 crc kubenswrapper[4663]: I1211 10:19:20.512104 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:20 crc kubenswrapper[4663]: I1211 10:19:20.512170 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:20 crc kubenswrapper[4663]: I1211 10:19:20.512291 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:20 crc kubenswrapper[4663]: I1211 10:19:20.512351 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:20Z","lastTransitionTime":"2025-12-11T10:19:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:20 crc kubenswrapper[4663]: I1211 10:19:20.613954 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:20 crc kubenswrapper[4663]: I1211 10:19:20.613982 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:20 crc kubenswrapper[4663]: I1211 10:19:20.613991 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:20 crc kubenswrapper[4663]: I1211 10:19:20.614000 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:20 crc kubenswrapper[4663]: I1211 10:19:20.614009 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:20Z","lastTransitionTime":"2025-12-11T10:19:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:20 crc kubenswrapper[4663]: I1211 10:19:20.715892 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:20 crc kubenswrapper[4663]: I1211 10:19:20.715963 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:20 crc kubenswrapper[4663]: I1211 10:19:20.715976 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:20 crc kubenswrapper[4663]: I1211 10:19:20.715996 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:20 crc kubenswrapper[4663]: I1211 10:19:20.716011 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:20Z","lastTransitionTime":"2025-12-11T10:19:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:20 crc kubenswrapper[4663]: I1211 10:19:20.817429 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:20 crc kubenswrapper[4663]: I1211 10:19:20.817461 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:20 crc kubenswrapper[4663]: I1211 10:19:20.817469 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:20 crc kubenswrapper[4663]: I1211 10:19:20.817481 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:20 crc kubenswrapper[4663]: I1211 10:19:20.817489 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:20Z","lastTransitionTime":"2025-12-11T10:19:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:20 crc kubenswrapper[4663]: I1211 10:19:20.919108 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:20 crc kubenswrapper[4663]: I1211 10:19:20.919162 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:20 crc kubenswrapper[4663]: I1211 10:19:20.919176 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:20 crc kubenswrapper[4663]: I1211 10:19:20.919209 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:20 crc kubenswrapper[4663]: I1211 10:19:20.919219 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:20Z","lastTransitionTime":"2025-12-11T10:19:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.020980 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.021071 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.021082 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.021097 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.021108 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:21Z","lastTransitionTime":"2025-12-11T10:19:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.123483 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.123512 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.123519 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.123531 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.123538 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:21Z","lastTransitionTime":"2025-12-11T10:19:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.225241 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.225265 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.225273 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.225286 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.225294 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:21Z","lastTransitionTime":"2025-12-11T10:19:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.279534 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.279559 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.279567 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.279576 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.279585 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:21Z","lastTransitionTime":"2025-12-11T10:19:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:21 crc kubenswrapper[4663]: E1211 10:19:21.288374 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a09a2675-5943-46f1-b4c2-67ff05e820a6\\\",\\\"systemUUID\\\":\\\"18e040fd-bd85-4c98-8f53-60800a512169\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:21Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.290397 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.290415 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.290423 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.290432 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.290439 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:21Z","lastTransitionTime":"2025-12-11T10:19:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:21 crc kubenswrapper[4663]: E1211 10:19:21.298008 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a09a2675-5943-46f1-b4c2-67ff05e820a6\\\",\\\"systemUUID\\\":\\\"18e040fd-bd85-4c98-8f53-60800a512169\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:21Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.299811 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.299844 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.299852 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.299862 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.299868 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:21Z","lastTransitionTime":"2025-12-11T10:19:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:21 crc kubenswrapper[4663]: E1211 10:19:21.307459 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a09a2675-5943-46f1-b4c2-67ff05e820a6\\\",\\\"systemUUID\\\":\\\"18e040fd-bd85-4c98-8f53-60800a512169\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:21Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.309480 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.309501 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.309509 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.309519 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.309526 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:21Z","lastTransitionTime":"2025-12-11T10:19:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:21 crc kubenswrapper[4663]: E1211 10:19:21.317889 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a09a2675-5943-46f1-b4c2-67ff05e820a6\\\",\\\"systemUUID\\\":\\\"18e040fd-bd85-4c98-8f53-60800a512169\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:21Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.320773 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.320809 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.320833 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.320846 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.320856 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:21Z","lastTransitionTime":"2025-12-11T10:19:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:21 crc kubenswrapper[4663]: E1211 10:19:21.330001 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T10:19:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T10:19:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a09a2675-5943-46f1-b4c2-67ff05e820a6\\\",\\\"systemUUID\\\":\\\"18e040fd-bd85-4c98-8f53-60800a512169\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T10:19:21Z is after 2025-08-24T17:21:41Z" Dec 11 10:19:21 crc kubenswrapper[4663]: E1211 10:19:21.330123 4663 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.331195 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.331228 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.331239 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.331254 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.331265 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:21Z","lastTransitionTime":"2025-12-11T10:19:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.433143 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.433196 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.433210 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.433223 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.433231 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:21Z","lastTransitionTime":"2025-12-11T10:19:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.503905 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.503980 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:19:21 crc kubenswrapper[4663]: E1211 10:19:21.504030 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5r5q6" podUID="54c64914-8d10-48bc-a755-1f7257400a32" Dec 11 10:19:21 crc kubenswrapper[4663]: E1211 10:19:21.504089 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.534758 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.534795 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.534804 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.534815 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.534825 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:21Z","lastTransitionTime":"2025-12-11T10:19:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.636884 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.636921 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.636929 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.636940 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.636949 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:21Z","lastTransitionTime":"2025-12-11T10:19:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.739831 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.739892 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.739901 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.739916 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.739927 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:21Z","lastTransitionTime":"2025-12-11T10:19:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.841618 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.841663 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.841672 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.841684 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.841693 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:21Z","lastTransitionTime":"2025-12-11T10:19:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.944624 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.944696 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.944710 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.944732 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:21 crc kubenswrapper[4663]: I1211 10:19:21.944747 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:21Z","lastTransitionTime":"2025-12-11T10:19:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:22 crc kubenswrapper[4663]: I1211 10:19:22.047559 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:22 crc kubenswrapper[4663]: I1211 10:19:22.047612 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:22 crc kubenswrapper[4663]: I1211 10:19:22.047620 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:22 crc kubenswrapper[4663]: I1211 10:19:22.047635 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:22 crc kubenswrapper[4663]: I1211 10:19:22.047646 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:22Z","lastTransitionTime":"2025-12-11T10:19:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:22 crc kubenswrapper[4663]: I1211 10:19:22.150470 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:22 crc kubenswrapper[4663]: I1211 10:19:22.150519 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:22 crc kubenswrapper[4663]: I1211 10:19:22.150531 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:22 crc kubenswrapper[4663]: I1211 10:19:22.150554 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:22 crc kubenswrapper[4663]: I1211 10:19:22.150565 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:22Z","lastTransitionTime":"2025-12-11T10:19:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:22 crc kubenswrapper[4663]: I1211 10:19:22.252305 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:22 crc kubenswrapper[4663]: I1211 10:19:22.252341 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:22 crc kubenswrapper[4663]: I1211 10:19:22.252349 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:22 crc kubenswrapper[4663]: I1211 10:19:22.252362 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:22 crc kubenswrapper[4663]: I1211 10:19:22.252371 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:22Z","lastTransitionTime":"2025-12-11T10:19:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:22 crc kubenswrapper[4663]: I1211 10:19:22.354432 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:22 crc kubenswrapper[4663]: I1211 10:19:22.354477 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:22 crc kubenswrapper[4663]: I1211 10:19:22.354485 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:22 crc kubenswrapper[4663]: I1211 10:19:22.354501 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:22 crc kubenswrapper[4663]: I1211 10:19:22.354512 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:22Z","lastTransitionTime":"2025-12-11T10:19:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:22 crc kubenswrapper[4663]: I1211 10:19:22.456890 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:22 crc kubenswrapper[4663]: I1211 10:19:22.456933 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:22 crc kubenswrapper[4663]: I1211 10:19:22.456944 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:22 crc kubenswrapper[4663]: I1211 10:19:22.456955 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:22 crc kubenswrapper[4663]: I1211 10:19:22.456967 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:22Z","lastTransitionTime":"2025-12-11T10:19:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:22 crc kubenswrapper[4663]: I1211 10:19:22.504586 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:19:22 crc kubenswrapper[4663]: I1211 10:19:22.504615 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:19:22 crc kubenswrapper[4663]: E1211 10:19:22.504759 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 10:19:22 crc kubenswrapper[4663]: E1211 10:19:22.504854 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 10:19:22 crc kubenswrapper[4663]: I1211 10:19:22.559310 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:22 crc kubenswrapper[4663]: I1211 10:19:22.559349 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:22 crc kubenswrapper[4663]: I1211 10:19:22.559357 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:22 crc kubenswrapper[4663]: I1211 10:19:22.559371 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:22 crc kubenswrapper[4663]: I1211 10:19:22.559395 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:22Z","lastTransitionTime":"2025-12-11T10:19:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:22 crc kubenswrapper[4663]: I1211 10:19:22.662100 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:22 crc kubenswrapper[4663]: I1211 10:19:22.662152 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:22 crc kubenswrapper[4663]: I1211 10:19:22.662161 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:22 crc kubenswrapper[4663]: I1211 10:19:22.662177 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:22 crc kubenswrapper[4663]: I1211 10:19:22.662211 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:22Z","lastTransitionTime":"2025-12-11T10:19:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:22 crc kubenswrapper[4663]: I1211 10:19:22.764314 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:22 crc kubenswrapper[4663]: I1211 10:19:22.764352 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:22 crc kubenswrapper[4663]: I1211 10:19:22.764360 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:22 crc kubenswrapper[4663]: I1211 10:19:22.764375 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:22 crc kubenswrapper[4663]: I1211 10:19:22.764385 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:22Z","lastTransitionTime":"2025-12-11T10:19:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:22 crc kubenswrapper[4663]: I1211 10:19:22.866486 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:22 crc kubenswrapper[4663]: I1211 10:19:22.866531 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:22 crc kubenswrapper[4663]: I1211 10:19:22.866539 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:22 crc kubenswrapper[4663]: I1211 10:19:22.866554 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:22 crc kubenswrapper[4663]: I1211 10:19:22.866565 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:22Z","lastTransitionTime":"2025-12-11T10:19:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:22 crc kubenswrapper[4663]: I1211 10:19:22.968591 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:22 crc kubenswrapper[4663]: I1211 10:19:22.968628 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:22 crc kubenswrapper[4663]: I1211 10:19:22.968638 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:22 crc kubenswrapper[4663]: I1211 10:19:22.968652 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:22 crc kubenswrapper[4663]: I1211 10:19:22.968662 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:22Z","lastTransitionTime":"2025-12-11T10:19:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:23 crc kubenswrapper[4663]: I1211 10:19:23.071070 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:23 crc kubenswrapper[4663]: I1211 10:19:23.071117 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:23 crc kubenswrapper[4663]: I1211 10:19:23.071126 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:23 crc kubenswrapper[4663]: I1211 10:19:23.071143 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:23 crc kubenswrapper[4663]: I1211 10:19:23.071154 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:23Z","lastTransitionTime":"2025-12-11T10:19:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:23 crc kubenswrapper[4663]: I1211 10:19:23.173386 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:23 crc kubenswrapper[4663]: I1211 10:19:23.173423 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:23 crc kubenswrapper[4663]: I1211 10:19:23.173431 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:23 crc kubenswrapper[4663]: I1211 10:19:23.173446 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:23 crc kubenswrapper[4663]: I1211 10:19:23.173454 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:23Z","lastTransitionTime":"2025-12-11T10:19:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:23 crc kubenswrapper[4663]: I1211 10:19:23.275603 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:23 crc kubenswrapper[4663]: I1211 10:19:23.275638 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:23 crc kubenswrapper[4663]: I1211 10:19:23.275648 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:23 crc kubenswrapper[4663]: I1211 10:19:23.275661 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:23 crc kubenswrapper[4663]: I1211 10:19:23.275669 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:23Z","lastTransitionTime":"2025-12-11T10:19:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:23 crc kubenswrapper[4663]: I1211 10:19:23.377878 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:23 crc kubenswrapper[4663]: I1211 10:19:23.377909 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:23 crc kubenswrapper[4663]: I1211 10:19:23.377917 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:23 crc kubenswrapper[4663]: I1211 10:19:23.377930 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:23 crc kubenswrapper[4663]: I1211 10:19:23.377939 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:23Z","lastTransitionTime":"2025-12-11T10:19:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:23 crc kubenswrapper[4663]: I1211 10:19:23.479921 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:23 crc kubenswrapper[4663]: I1211 10:19:23.479964 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:23 crc kubenswrapper[4663]: I1211 10:19:23.479986 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:23 crc kubenswrapper[4663]: I1211 10:19:23.480002 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:23 crc kubenswrapper[4663]: I1211 10:19:23.480012 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:23Z","lastTransitionTime":"2025-12-11T10:19:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:23 crc kubenswrapper[4663]: I1211 10:19:23.504608 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:19:23 crc kubenswrapper[4663]: E1211 10:19:23.504715 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5r5q6" podUID="54c64914-8d10-48bc-a755-1f7257400a32" Dec 11 10:19:23 crc kubenswrapper[4663]: I1211 10:19:23.504620 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:19:23 crc kubenswrapper[4663]: E1211 10:19:23.504791 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 10:19:23 crc kubenswrapper[4663]: I1211 10:19:23.582418 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:23 crc kubenswrapper[4663]: I1211 10:19:23.582456 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:23 crc kubenswrapper[4663]: I1211 10:19:23.582478 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:23 crc kubenswrapper[4663]: I1211 10:19:23.582492 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:23 crc kubenswrapper[4663]: I1211 10:19:23.582502 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:23Z","lastTransitionTime":"2025-12-11T10:19:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:23 crc kubenswrapper[4663]: I1211 10:19:23.685166 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:23 crc kubenswrapper[4663]: I1211 10:19:23.685229 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:23 crc kubenswrapper[4663]: I1211 10:19:23.685243 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:23 crc kubenswrapper[4663]: I1211 10:19:23.685262 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:23 crc kubenswrapper[4663]: I1211 10:19:23.685275 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:23Z","lastTransitionTime":"2025-12-11T10:19:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:23 crc kubenswrapper[4663]: I1211 10:19:23.787858 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:23 crc kubenswrapper[4663]: I1211 10:19:23.787918 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:23 crc kubenswrapper[4663]: I1211 10:19:23.787929 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:23 crc kubenswrapper[4663]: I1211 10:19:23.787947 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:23 crc kubenswrapper[4663]: I1211 10:19:23.787963 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:23Z","lastTransitionTime":"2025-12-11T10:19:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:23 crc kubenswrapper[4663]: I1211 10:19:23.890505 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:23 crc kubenswrapper[4663]: I1211 10:19:23.890550 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:23 crc kubenswrapper[4663]: I1211 10:19:23.890558 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:23 crc kubenswrapper[4663]: I1211 10:19:23.890574 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:23 crc kubenswrapper[4663]: I1211 10:19:23.890586 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:23Z","lastTransitionTime":"2025-12-11T10:19:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:23 crc kubenswrapper[4663]: I1211 10:19:23.992889 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:23 crc kubenswrapper[4663]: I1211 10:19:23.992926 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:23 crc kubenswrapper[4663]: I1211 10:19:23.992935 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:23 crc kubenswrapper[4663]: I1211 10:19:23.992949 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:23 crc kubenswrapper[4663]: I1211 10:19:23.992958 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:23Z","lastTransitionTime":"2025-12-11T10:19:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:24 crc kubenswrapper[4663]: I1211 10:19:24.095100 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:24 crc kubenswrapper[4663]: I1211 10:19:24.095137 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:24 crc kubenswrapper[4663]: I1211 10:19:24.095145 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:24 crc kubenswrapper[4663]: I1211 10:19:24.095160 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:24 crc kubenswrapper[4663]: I1211 10:19:24.095170 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:24Z","lastTransitionTime":"2025-12-11T10:19:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:24 crc kubenswrapper[4663]: I1211 10:19:24.196869 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:24 crc kubenswrapper[4663]: I1211 10:19:24.196903 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:24 crc kubenswrapper[4663]: I1211 10:19:24.196912 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:24 crc kubenswrapper[4663]: I1211 10:19:24.196925 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:24 crc kubenswrapper[4663]: I1211 10:19:24.196935 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:24Z","lastTransitionTime":"2025-12-11T10:19:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:24 crc kubenswrapper[4663]: I1211 10:19:24.299316 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:24 crc kubenswrapper[4663]: I1211 10:19:24.299358 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:24 crc kubenswrapper[4663]: I1211 10:19:24.299370 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:24 crc kubenswrapper[4663]: I1211 10:19:24.299385 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:24 crc kubenswrapper[4663]: I1211 10:19:24.299397 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:24Z","lastTransitionTime":"2025-12-11T10:19:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:24 crc kubenswrapper[4663]: I1211 10:19:24.401096 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:24 crc kubenswrapper[4663]: I1211 10:19:24.401120 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:24 crc kubenswrapper[4663]: I1211 10:19:24.401130 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:24 crc kubenswrapper[4663]: I1211 10:19:24.401143 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:24 crc kubenswrapper[4663]: I1211 10:19:24.401152 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:24Z","lastTransitionTime":"2025-12-11T10:19:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:24 crc kubenswrapper[4663]: I1211 10:19:24.403883 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/54c64914-8d10-48bc-a755-1f7257400a32-metrics-certs\") pod \"network-metrics-daemon-5r5q6\" (UID: \"54c64914-8d10-48bc-a755-1f7257400a32\") " pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:19:24 crc kubenswrapper[4663]: E1211 10:19:24.403996 4663 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 11 10:19:24 crc kubenswrapper[4663]: E1211 10:19:24.404063 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/54c64914-8d10-48bc-a755-1f7257400a32-metrics-certs podName:54c64914-8d10-48bc-a755-1f7257400a32 nodeName:}" failed. No retries permitted until 2025-12-11 10:20:28.404037471 +0000 UTC m=+159.221967245 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/54c64914-8d10-48bc-a755-1f7257400a32-metrics-certs") pod "network-metrics-daemon-5r5q6" (UID: "54c64914-8d10-48bc-a755-1f7257400a32") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 11 10:19:24 crc kubenswrapper[4663]: I1211 10:19:24.503284 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:24 crc kubenswrapper[4663]: I1211 10:19:24.503340 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:24 crc kubenswrapper[4663]: I1211 10:19:24.503350 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:24 crc kubenswrapper[4663]: I1211 10:19:24.503365 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:24 crc kubenswrapper[4663]: I1211 10:19:24.503376 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:24Z","lastTransitionTime":"2025-12-11T10:19:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:24 crc kubenswrapper[4663]: I1211 10:19:24.503647 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:19:24 crc kubenswrapper[4663]: I1211 10:19:24.503647 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:19:24 crc kubenswrapper[4663]: E1211 10:19:24.503818 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 10:19:24 crc kubenswrapper[4663]: E1211 10:19:24.503893 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 10:19:24 crc kubenswrapper[4663]: I1211 10:19:24.605375 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:24 crc kubenswrapper[4663]: I1211 10:19:24.605431 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:24 crc kubenswrapper[4663]: I1211 10:19:24.605442 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:24 crc kubenswrapper[4663]: I1211 10:19:24.605462 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:24 crc kubenswrapper[4663]: I1211 10:19:24.605473 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:24Z","lastTransitionTime":"2025-12-11T10:19:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:24 crc kubenswrapper[4663]: I1211 10:19:24.707208 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:24 crc kubenswrapper[4663]: I1211 10:19:24.707244 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:24 crc kubenswrapper[4663]: I1211 10:19:24.707254 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:24 crc kubenswrapper[4663]: I1211 10:19:24.707267 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:24 crc kubenswrapper[4663]: I1211 10:19:24.707276 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:24Z","lastTransitionTime":"2025-12-11T10:19:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:24 crc kubenswrapper[4663]: I1211 10:19:24.808551 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:24 crc kubenswrapper[4663]: I1211 10:19:24.808582 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:24 crc kubenswrapper[4663]: I1211 10:19:24.808590 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:24 crc kubenswrapper[4663]: I1211 10:19:24.808602 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:24 crc kubenswrapper[4663]: I1211 10:19:24.808610 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:24Z","lastTransitionTime":"2025-12-11T10:19:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:24 crc kubenswrapper[4663]: I1211 10:19:24.910683 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:24 crc kubenswrapper[4663]: I1211 10:19:24.910728 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:24 crc kubenswrapper[4663]: I1211 10:19:24.910736 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:24 crc kubenswrapper[4663]: I1211 10:19:24.910751 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:24 crc kubenswrapper[4663]: I1211 10:19:24.910760 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:24Z","lastTransitionTime":"2025-12-11T10:19:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:25 crc kubenswrapper[4663]: I1211 10:19:25.013774 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:25 crc kubenswrapper[4663]: I1211 10:19:25.013829 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:25 crc kubenswrapper[4663]: I1211 10:19:25.013841 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:25 crc kubenswrapper[4663]: I1211 10:19:25.013864 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:25 crc kubenswrapper[4663]: I1211 10:19:25.013881 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:25Z","lastTransitionTime":"2025-12-11T10:19:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:25 crc kubenswrapper[4663]: I1211 10:19:25.115906 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:25 crc kubenswrapper[4663]: I1211 10:19:25.115953 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:25 crc kubenswrapper[4663]: I1211 10:19:25.115962 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:25 crc kubenswrapper[4663]: I1211 10:19:25.115981 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:25 crc kubenswrapper[4663]: I1211 10:19:25.115991 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:25Z","lastTransitionTime":"2025-12-11T10:19:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:25 crc kubenswrapper[4663]: I1211 10:19:25.218072 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:25 crc kubenswrapper[4663]: I1211 10:19:25.218145 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:25 crc kubenswrapper[4663]: I1211 10:19:25.218157 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:25 crc kubenswrapper[4663]: I1211 10:19:25.218177 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:25 crc kubenswrapper[4663]: I1211 10:19:25.218210 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:25Z","lastTransitionTime":"2025-12-11T10:19:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:25 crc kubenswrapper[4663]: I1211 10:19:25.320144 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:25 crc kubenswrapper[4663]: I1211 10:19:25.320214 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:25 crc kubenswrapper[4663]: I1211 10:19:25.320227 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:25 crc kubenswrapper[4663]: I1211 10:19:25.320244 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:25 crc kubenswrapper[4663]: I1211 10:19:25.320257 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:25Z","lastTransitionTime":"2025-12-11T10:19:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:25 crc kubenswrapper[4663]: I1211 10:19:25.422398 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:25 crc kubenswrapper[4663]: I1211 10:19:25.422451 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:25 crc kubenswrapper[4663]: I1211 10:19:25.422460 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:25 crc kubenswrapper[4663]: I1211 10:19:25.422480 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:25 crc kubenswrapper[4663]: I1211 10:19:25.422491 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:25Z","lastTransitionTime":"2025-12-11T10:19:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:25 crc kubenswrapper[4663]: I1211 10:19:25.504003 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:19:25 crc kubenswrapper[4663]: I1211 10:19:25.504213 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:19:25 crc kubenswrapper[4663]: E1211 10:19:25.504281 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 10:19:25 crc kubenswrapper[4663]: E1211 10:19:25.504375 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-5r5q6" podUID="54c64914-8d10-48bc-a755-1f7257400a32" Dec 11 10:19:25 crc kubenswrapper[4663]: I1211 10:19:25.524243 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:25 crc kubenswrapper[4663]: I1211 10:19:25.524298 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:25 crc kubenswrapper[4663]: I1211 10:19:25.524310 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:25 crc kubenswrapper[4663]: I1211 10:19:25.524329 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:25 crc kubenswrapper[4663]: I1211 10:19:25.524344 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:25Z","lastTransitionTime":"2025-12-11T10:19:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:25 crc kubenswrapper[4663]: I1211 10:19:25.626800 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:25 crc kubenswrapper[4663]: I1211 10:19:25.626851 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:25 crc kubenswrapper[4663]: I1211 10:19:25.626862 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:25 crc kubenswrapper[4663]: I1211 10:19:25.626878 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:25 crc kubenswrapper[4663]: I1211 10:19:25.626890 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:25Z","lastTransitionTime":"2025-12-11T10:19:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:25 crc kubenswrapper[4663]: I1211 10:19:25.729082 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:25 crc kubenswrapper[4663]: I1211 10:19:25.729132 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:25 crc kubenswrapper[4663]: I1211 10:19:25.729163 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:25 crc kubenswrapper[4663]: I1211 10:19:25.729193 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:25 crc kubenswrapper[4663]: I1211 10:19:25.729207 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:25Z","lastTransitionTime":"2025-12-11T10:19:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:25 crc kubenswrapper[4663]: I1211 10:19:25.832281 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:25 crc kubenswrapper[4663]: I1211 10:19:25.832335 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:25 crc kubenswrapper[4663]: I1211 10:19:25.832349 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:25 crc kubenswrapper[4663]: I1211 10:19:25.832367 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:25 crc kubenswrapper[4663]: I1211 10:19:25.832379 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:25Z","lastTransitionTime":"2025-12-11T10:19:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:25 crc kubenswrapper[4663]: I1211 10:19:25.934062 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:25 crc kubenswrapper[4663]: I1211 10:19:25.934099 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:25 crc kubenswrapper[4663]: I1211 10:19:25.934107 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:25 crc kubenswrapper[4663]: I1211 10:19:25.934123 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:25 crc kubenswrapper[4663]: I1211 10:19:25.934133 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:25Z","lastTransitionTime":"2025-12-11T10:19:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:26 crc kubenswrapper[4663]: I1211 10:19:26.036451 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:26 crc kubenswrapper[4663]: I1211 10:19:26.036478 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:26 crc kubenswrapper[4663]: I1211 10:19:26.036486 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:26 crc kubenswrapper[4663]: I1211 10:19:26.036495 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:26 crc kubenswrapper[4663]: I1211 10:19:26.036504 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:26Z","lastTransitionTime":"2025-12-11T10:19:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:26 crc kubenswrapper[4663]: I1211 10:19:26.138545 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:26 crc kubenswrapper[4663]: I1211 10:19:26.138580 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:26 crc kubenswrapper[4663]: I1211 10:19:26.138589 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:26 crc kubenswrapper[4663]: I1211 10:19:26.138601 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:26 crc kubenswrapper[4663]: I1211 10:19:26.138610 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:26Z","lastTransitionTime":"2025-12-11T10:19:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:26 crc kubenswrapper[4663]: I1211 10:19:26.240727 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:26 crc kubenswrapper[4663]: I1211 10:19:26.240755 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:26 crc kubenswrapper[4663]: I1211 10:19:26.240763 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:26 crc kubenswrapper[4663]: I1211 10:19:26.240772 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:26 crc kubenswrapper[4663]: I1211 10:19:26.240781 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:26Z","lastTransitionTime":"2025-12-11T10:19:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:26 crc kubenswrapper[4663]: I1211 10:19:26.342728 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:26 crc kubenswrapper[4663]: I1211 10:19:26.342752 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:26 crc kubenswrapper[4663]: I1211 10:19:26.342760 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:26 crc kubenswrapper[4663]: I1211 10:19:26.342771 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:26 crc kubenswrapper[4663]: I1211 10:19:26.342779 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:26Z","lastTransitionTime":"2025-12-11T10:19:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:26 crc kubenswrapper[4663]: I1211 10:19:26.444454 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:26 crc kubenswrapper[4663]: I1211 10:19:26.444482 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:26 crc kubenswrapper[4663]: I1211 10:19:26.444493 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:26 crc kubenswrapper[4663]: I1211 10:19:26.444503 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:26 crc kubenswrapper[4663]: I1211 10:19:26.444511 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:26Z","lastTransitionTime":"2025-12-11T10:19:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:26 crc kubenswrapper[4663]: I1211 10:19:26.504322 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:19:26 crc kubenswrapper[4663]: I1211 10:19:26.504442 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:19:26 crc kubenswrapper[4663]: E1211 10:19:26.504651 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 10:19:26 crc kubenswrapper[4663]: E1211 10:19:26.504746 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 10:19:26 crc kubenswrapper[4663]: I1211 10:19:26.547901 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:26 crc kubenswrapper[4663]: I1211 10:19:26.548020 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:26 crc kubenswrapper[4663]: I1211 10:19:26.548036 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:26 crc kubenswrapper[4663]: I1211 10:19:26.548069 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:26 crc kubenswrapper[4663]: I1211 10:19:26.548095 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:26Z","lastTransitionTime":"2025-12-11T10:19:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:26 crc kubenswrapper[4663]: I1211 10:19:26.651529 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:26 crc kubenswrapper[4663]: I1211 10:19:26.651591 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:26 crc kubenswrapper[4663]: I1211 10:19:26.651602 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:26 crc kubenswrapper[4663]: I1211 10:19:26.651623 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:26 crc kubenswrapper[4663]: I1211 10:19:26.651647 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:26Z","lastTransitionTime":"2025-12-11T10:19:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:26 crc kubenswrapper[4663]: I1211 10:19:26.753655 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:26 crc kubenswrapper[4663]: I1211 10:19:26.753718 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:26 crc kubenswrapper[4663]: I1211 10:19:26.753730 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:26 crc kubenswrapper[4663]: I1211 10:19:26.753746 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:26 crc kubenswrapper[4663]: I1211 10:19:26.753756 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:26Z","lastTransitionTime":"2025-12-11T10:19:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:26 crc kubenswrapper[4663]: I1211 10:19:26.855673 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:26 crc kubenswrapper[4663]: I1211 10:19:26.855710 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:26 crc kubenswrapper[4663]: I1211 10:19:26.855720 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:26 crc kubenswrapper[4663]: I1211 10:19:26.855736 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:26 crc kubenswrapper[4663]: I1211 10:19:26.855745 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:26Z","lastTransitionTime":"2025-12-11T10:19:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:26 crc kubenswrapper[4663]: I1211 10:19:26.958412 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:26 crc kubenswrapper[4663]: I1211 10:19:26.958455 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:26 crc kubenswrapper[4663]: I1211 10:19:26.958463 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:26 crc kubenswrapper[4663]: I1211 10:19:26.958478 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:26 crc kubenswrapper[4663]: I1211 10:19:26.958486 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:26Z","lastTransitionTime":"2025-12-11T10:19:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:27 crc kubenswrapper[4663]: I1211 10:19:27.061044 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:27 crc kubenswrapper[4663]: I1211 10:19:27.061087 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:27 crc kubenswrapper[4663]: I1211 10:19:27.061096 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:27 crc kubenswrapper[4663]: I1211 10:19:27.061126 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:27 crc kubenswrapper[4663]: I1211 10:19:27.061139 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:27Z","lastTransitionTime":"2025-12-11T10:19:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:27 crc kubenswrapper[4663]: I1211 10:19:27.163645 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:27 crc kubenswrapper[4663]: I1211 10:19:27.163681 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:27 crc kubenswrapper[4663]: I1211 10:19:27.163690 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:27 crc kubenswrapper[4663]: I1211 10:19:27.163703 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:27 crc kubenswrapper[4663]: I1211 10:19:27.163712 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:27Z","lastTransitionTime":"2025-12-11T10:19:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:27 crc kubenswrapper[4663]: I1211 10:19:27.265399 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:27 crc kubenswrapper[4663]: I1211 10:19:27.265429 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:27 crc kubenswrapper[4663]: I1211 10:19:27.265437 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:27 crc kubenswrapper[4663]: I1211 10:19:27.265448 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:27 crc kubenswrapper[4663]: I1211 10:19:27.265457 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:27Z","lastTransitionTime":"2025-12-11T10:19:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:27 crc kubenswrapper[4663]: I1211 10:19:27.367295 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:27 crc kubenswrapper[4663]: I1211 10:19:27.367333 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:27 crc kubenswrapper[4663]: I1211 10:19:27.367342 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:27 crc kubenswrapper[4663]: I1211 10:19:27.367354 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:27 crc kubenswrapper[4663]: I1211 10:19:27.367364 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:27Z","lastTransitionTime":"2025-12-11T10:19:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:27 crc kubenswrapper[4663]: I1211 10:19:27.468753 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:27 crc kubenswrapper[4663]: I1211 10:19:27.468782 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:27 crc kubenswrapper[4663]: I1211 10:19:27.468790 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:27 crc kubenswrapper[4663]: I1211 10:19:27.468801 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:27 crc kubenswrapper[4663]: I1211 10:19:27.468810 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:27Z","lastTransitionTime":"2025-12-11T10:19:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:27 crc kubenswrapper[4663]: I1211 10:19:27.504205 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:19:27 crc kubenswrapper[4663]: E1211 10:19:27.504407 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 10:19:27 crc kubenswrapper[4663]: I1211 10:19:27.504207 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:19:27 crc kubenswrapper[4663]: E1211 10:19:27.504601 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5r5q6" podUID="54c64914-8d10-48bc-a755-1f7257400a32" Dec 11 10:19:27 crc kubenswrapper[4663]: I1211 10:19:27.571482 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:27 crc kubenswrapper[4663]: I1211 10:19:27.571535 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:27 crc kubenswrapper[4663]: I1211 10:19:27.571550 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:27 crc kubenswrapper[4663]: I1211 10:19:27.571571 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:27 crc kubenswrapper[4663]: I1211 10:19:27.571583 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:27Z","lastTransitionTime":"2025-12-11T10:19:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:27 crc kubenswrapper[4663]: I1211 10:19:27.674302 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:27 crc kubenswrapper[4663]: I1211 10:19:27.674349 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:27 crc kubenswrapper[4663]: I1211 10:19:27.674358 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:27 crc kubenswrapper[4663]: I1211 10:19:27.674374 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:27 crc kubenswrapper[4663]: I1211 10:19:27.674384 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:27Z","lastTransitionTime":"2025-12-11T10:19:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:27 crc kubenswrapper[4663]: I1211 10:19:27.775859 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:27 crc kubenswrapper[4663]: I1211 10:19:27.775897 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:27 crc kubenswrapper[4663]: I1211 10:19:27.775905 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:27 crc kubenswrapper[4663]: I1211 10:19:27.775922 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:27 crc kubenswrapper[4663]: I1211 10:19:27.775932 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:27Z","lastTransitionTime":"2025-12-11T10:19:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:27 crc kubenswrapper[4663]: I1211 10:19:27.878490 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:27 crc kubenswrapper[4663]: I1211 10:19:27.878517 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:27 crc kubenswrapper[4663]: I1211 10:19:27.878525 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:27 crc kubenswrapper[4663]: I1211 10:19:27.878537 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:27 crc kubenswrapper[4663]: I1211 10:19:27.878545 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:27Z","lastTransitionTime":"2025-12-11T10:19:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:27 crc kubenswrapper[4663]: I1211 10:19:27.980443 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:27 crc kubenswrapper[4663]: I1211 10:19:27.980481 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:27 crc kubenswrapper[4663]: I1211 10:19:27.980490 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:27 crc kubenswrapper[4663]: I1211 10:19:27.980502 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:27 crc kubenswrapper[4663]: I1211 10:19:27.980523 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:27Z","lastTransitionTime":"2025-12-11T10:19:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:28 crc kubenswrapper[4663]: I1211 10:19:28.082316 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:28 crc kubenswrapper[4663]: I1211 10:19:28.082345 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:28 crc kubenswrapper[4663]: I1211 10:19:28.082353 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:28 crc kubenswrapper[4663]: I1211 10:19:28.082366 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:28 crc kubenswrapper[4663]: I1211 10:19:28.082374 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:28Z","lastTransitionTime":"2025-12-11T10:19:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:28 crc kubenswrapper[4663]: I1211 10:19:28.184037 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:28 crc kubenswrapper[4663]: I1211 10:19:28.184069 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:28 crc kubenswrapper[4663]: I1211 10:19:28.184090 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:28 crc kubenswrapper[4663]: I1211 10:19:28.184103 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:28 crc kubenswrapper[4663]: I1211 10:19:28.184112 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:28Z","lastTransitionTime":"2025-12-11T10:19:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:28 crc kubenswrapper[4663]: I1211 10:19:28.286742 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:28 crc kubenswrapper[4663]: I1211 10:19:28.286777 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:28 crc kubenswrapper[4663]: I1211 10:19:28.286808 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:28 crc kubenswrapper[4663]: I1211 10:19:28.286821 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:28 crc kubenswrapper[4663]: I1211 10:19:28.286830 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:28Z","lastTransitionTime":"2025-12-11T10:19:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:28 crc kubenswrapper[4663]: I1211 10:19:28.389823 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:28 crc kubenswrapper[4663]: I1211 10:19:28.389865 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:28 crc kubenswrapper[4663]: I1211 10:19:28.389879 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:28 crc kubenswrapper[4663]: I1211 10:19:28.389894 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:28 crc kubenswrapper[4663]: I1211 10:19:28.389904 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:28Z","lastTransitionTime":"2025-12-11T10:19:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:28 crc kubenswrapper[4663]: I1211 10:19:28.492724 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:28 crc kubenswrapper[4663]: I1211 10:19:28.492781 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:28 crc kubenswrapper[4663]: I1211 10:19:28.492793 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:28 crc kubenswrapper[4663]: I1211 10:19:28.492815 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:28 crc kubenswrapper[4663]: I1211 10:19:28.492827 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:28Z","lastTransitionTime":"2025-12-11T10:19:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:28 crc kubenswrapper[4663]: I1211 10:19:28.503977 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:19:28 crc kubenswrapper[4663]: I1211 10:19:28.504086 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:19:28 crc kubenswrapper[4663]: E1211 10:19:28.504404 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 10:19:28 crc kubenswrapper[4663]: E1211 10:19:28.504470 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 10:19:28 crc kubenswrapper[4663]: I1211 10:19:28.504640 4663 scope.go:117] "RemoveContainer" containerID="0963c6c1df39ea1ae72e428e60b76ab3c5677d293f2fc2dd2f19bf3807a0dd6e" Dec 11 10:19:28 crc kubenswrapper[4663]: E1211 10:19:28.504851 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-z7882_openshift-ovn-kubernetes(4f739f0f-4546-4db5-809d-435823288bd8)\"" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" podUID="4f739f0f-4546-4db5-809d-435823288bd8" Dec 11 10:19:28 crc kubenswrapper[4663]: I1211 10:19:28.595120 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:28 crc kubenswrapper[4663]: I1211 10:19:28.595173 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:28 crc kubenswrapper[4663]: I1211 10:19:28.595205 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:28 crc kubenswrapper[4663]: I1211 10:19:28.595224 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:28 crc kubenswrapper[4663]: I1211 10:19:28.595239 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:28Z","lastTransitionTime":"2025-12-11T10:19:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:28 crc kubenswrapper[4663]: I1211 10:19:28.697392 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:28 crc kubenswrapper[4663]: I1211 10:19:28.697432 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:28 crc kubenswrapper[4663]: I1211 10:19:28.697442 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:28 crc kubenswrapper[4663]: I1211 10:19:28.697458 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:28 crc kubenswrapper[4663]: I1211 10:19:28.697468 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:28Z","lastTransitionTime":"2025-12-11T10:19:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:28 crc kubenswrapper[4663]: I1211 10:19:28.799096 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:28 crc kubenswrapper[4663]: I1211 10:19:28.799130 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:28 crc kubenswrapper[4663]: I1211 10:19:28.799138 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:28 crc kubenswrapper[4663]: I1211 10:19:28.799154 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:28 crc kubenswrapper[4663]: I1211 10:19:28.799164 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:28Z","lastTransitionTime":"2025-12-11T10:19:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:28 crc kubenswrapper[4663]: I1211 10:19:28.901481 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:28 crc kubenswrapper[4663]: I1211 10:19:28.901532 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:28 crc kubenswrapper[4663]: I1211 10:19:28.901543 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:28 crc kubenswrapper[4663]: I1211 10:19:28.901559 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:28 crc kubenswrapper[4663]: I1211 10:19:28.901571 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:28Z","lastTransitionTime":"2025-12-11T10:19:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.003601 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.003655 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.003666 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.003679 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.003688 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:29Z","lastTransitionTime":"2025-12-11T10:19:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.105786 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.105828 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.105836 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.105852 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.105861 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:29Z","lastTransitionTime":"2025-12-11T10:19:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.208136 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.208167 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.208176 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.208201 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.208209 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:29Z","lastTransitionTime":"2025-12-11T10:19:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.310694 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.310731 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.310742 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.310756 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.310765 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:29Z","lastTransitionTime":"2025-12-11T10:19:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.413443 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.413502 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.413511 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.413526 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.413538 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:29Z","lastTransitionTime":"2025-12-11T10:19:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.504528 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.504602 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:19:29 crc kubenswrapper[4663]: E1211 10:19:29.504685 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 10:19:29 crc kubenswrapper[4663]: E1211 10:19:29.504923 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-5r5q6" podUID="54c64914-8d10-48bc-a755-1f7257400a32" Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.515977 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.516029 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.516051 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.516070 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.516081 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:29Z","lastTransitionTime":"2025-12-11T10:19:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.548459 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-4g9zj" podStartSLOduration=83.548417388 podStartE2EDuration="1m23.548417388s" podCreationTimestamp="2025-12-11 10:18:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:19:29.54833199 +0000 UTC m=+100.366261784" watchObservedRunningTime="2025-12-11 10:19:29.548417388 +0000 UTC m=+100.366347182" Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.568154 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-8lh4c" podStartSLOduration=83.56813146 podStartE2EDuration="1m23.56813146s" podCreationTimestamp="2025-12-11 10:18:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:19:29.556891087 +0000 UTC m=+100.374820871" watchObservedRunningTime="2025-12-11 10:19:29.56813146 +0000 UTC m=+100.386061245" Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.568315 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=83.568307406 podStartE2EDuration="1m23.568307406s" podCreationTimestamp="2025-12-11 10:18:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:19:29.567777184 +0000 UTC m=+100.385706969" watchObservedRunningTime="2025-12-11 10:19:29.568307406 +0000 UTC m=+100.386237190" Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.594370 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=48.594340435 podStartE2EDuration="48.594340435s" podCreationTimestamp="2025-12-11 10:18:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:19:29.594151104 +0000 UTC m=+100.412080888" 
watchObservedRunningTime="2025-12-11 10:19:29.594340435 +0000 UTC m=+100.412270219" Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.603348 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=26.603315533 podStartE2EDuration="26.603315533s" podCreationTimestamp="2025-12-11 10:19:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:19:29.602972647 +0000 UTC m=+100.420902431" watchObservedRunningTime="2025-12-11 10:19:29.603315533 +0000 UTC m=+100.421245306" Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.617768 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.617813 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.617823 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.617838 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.617848 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:29Z","lastTransitionTime":"2025-12-11T10:19:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.640496 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hbks6" podStartSLOduration=82.640477168 podStartE2EDuration="1m22.640477168s" podCreationTimestamp="2025-12-11 10:18:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:19:29.640076816 +0000 UTC m=+100.458006600" watchObservedRunningTime="2025-12-11 10:19:29.640477168 +0000 UTC m=+100.458406952" Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.651497 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=83.651476716 podStartE2EDuration="1m23.651476716s" podCreationTimestamp="2025-12-11 10:18:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:19:29.651386309 +0000 UTC m=+100.469316092" watchObservedRunningTime="2025-12-11 10:19:29.651476716 +0000 UTC m=+100.469406500" Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.665302 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-8f5qw" podStartSLOduration=83.665285148 podStartE2EDuration="1m23.665285148s" podCreationTimestamp="2025-12-11 10:18:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:19:29.658259331 +0000 UTC m=+100.476189116" watchObservedRunningTime="2025-12-11 10:19:29.665285148 +0000 UTC m=+100.483214932" Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.665421 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podStartSLOduration=83.665416511 podStartE2EDuration="1m23.665416511s" podCreationTimestamp="2025-12-11 10:18:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:19:29.665161108 +0000 UTC m=+100.483090892" watchObservedRunningTime="2025-12-11 10:19:29.665416511 +0000 UTC m=+100.483346295" Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.682894 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-qv7jl" podStartSLOduration=83.682877459 podStartE2EDuration="1m23.682877459s" podCreationTimestamp="2025-12-11 10:18:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:19:29.67533168 +0000 UTC m=+100.493261464" watchObservedRunningTime="2025-12-11 10:19:29.682877459 +0000 UTC m=+100.500807243" Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.720294 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.720348 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.720358 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.720374 4663 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.720384 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:29Z","lastTransitionTime":"2025-12-11T10:19:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.823893 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.823945 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.823958 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.823977 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.823993 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:29Z","lastTransitionTime":"2025-12-11T10:19:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.926542 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.926577 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.926587 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.926602 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:29 crc kubenswrapper[4663]: I1211 10:19:29.926611 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:29Z","lastTransitionTime":"2025-12-11T10:19:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:30 crc kubenswrapper[4663]: I1211 10:19:30.029646 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:30 crc kubenswrapper[4663]: I1211 10:19:30.029726 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:30 crc kubenswrapper[4663]: I1211 10:19:30.029741 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:30 crc kubenswrapper[4663]: I1211 10:19:30.029767 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:30 crc kubenswrapper[4663]: I1211 10:19:30.029782 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:30Z","lastTransitionTime":"2025-12-11T10:19:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:30 crc kubenswrapper[4663]: I1211 10:19:30.132407 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:30 crc kubenswrapper[4663]: I1211 10:19:30.133054 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:30 crc kubenswrapper[4663]: I1211 10:19:30.133090 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:30 crc kubenswrapper[4663]: I1211 10:19:30.133120 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:30 crc kubenswrapper[4663]: I1211 10:19:30.133134 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:30Z","lastTransitionTime":"2025-12-11T10:19:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:30 crc kubenswrapper[4663]: I1211 10:19:30.236089 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:30 crc kubenswrapper[4663]: I1211 10:19:30.236130 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:30 crc kubenswrapper[4663]: I1211 10:19:30.236142 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:30 crc kubenswrapper[4663]: I1211 10:19:30.236162 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:30 crc kubenswrapper[4663]: I1211 10:19:30.236199 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:30Z","lastTransitionTime":"2025-12-11T10:19:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:30 crc kubenswrapper[4663]: I1211 10:19:30.338413 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:30 crc kubenswrapper[4663]: I1211 10:19:30.338472 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:30 crc kubenswrapper[4663]: I1211 10:19:30.338481 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:30 crc kubenswrapper[4663]: I1211 10:19:30.338505 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:30 crc kubenswrapper[4663]: I1211 10:19:30.338519 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:30Z","lastTransitionTime":"2025-12-11T10:19:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:30 crc kubenswrapper[4663]: I1211 10:19:30.441445 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:30 crc kubenswrapper[4663]: I1211 10:19:30.441490 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:30 crc kubenswrapper[4663]: I1211 10:19:30.441502 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:30 crc kubenswrapper[4663]: I1211 10:19:30.441516 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:30 crc kubenswrapper[4663]: I1211 10:19:30.441526 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:30Z","lastTransitionTime":"2025-12-11T10:19:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:30 crc kubenswrapper[4663]: I1211 10:19:30.504684 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:19:30 crc kubenswrapper[4663]: I1211 10:19:30.504738 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:19:30 crc kubenswrapper[4663]: E1211 10:19:30.504872 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 10:19:30 crc kubenswrapper[4663]: E1211 10:19:30.504982 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 10:19:30 crc kubenswrapper[4663]: I1211 10:19:30.543707 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:30 crc kubenswrapper[4663]: I1211 10:19:30.543760 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:30 crc kubenswrapper[4663]: I1211 10:19:30.543772 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:30 crc kubenswrapper[4663]: I1211 10:19:30.543790 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:30 crc kubenswrapper[4663]: I1211 10:19:30.543803 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:30Z","lastTransitionTime":"2025-12-11T10:19:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:30 crc kubenswrapper[4663]: I1211 10:19:30.646355 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:30 crc kubenswrapper[4663]: I1211 10:19:30.646429 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:30 crc kubenswrapper[4663]: I1211 10:19:30.646443 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:30 crc kubenswrapper[4663]: I1211 10:19:30.646461 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:30 crc kubenswrapper[4663]: I1211 10:19:30.646473 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:30Z","lastTransitionTime":"2025-12-11T10:19:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:30 crc kubenswrapper[4663]: I1211 10:19:30.748970 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:30 crc kubenswrapper[4663]: I1211 10:19:30.749022 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:30 crc kubenswrapper[4663]: I1211 10:19:30.749031 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:30 crc kubenswrapper[4663]: I1211 10:19:30.749045 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:30 crc kubenswrapper[4663]: I1211 10:19:30.749054 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:30Z","lastTransitionTime":"2025-12-11T10:19:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:30 crc kubenswrapper[4663]: I1211 10:19:30.851324 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:30 crc kubenswrapper[4663]: I1211 10:19:30.851368 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:30 crc kubenswrapper[4663]: I1211 10:19:30.851376 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:30 crc kubenswrapper[4663]: I1211 10:19:30.851564 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:30 crc kubenswrapper[4663]: I1211 10:19:30.851592 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:30Z","lastTransitionTime":"2025-12-11T10:19:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:30 crc kubenswrapper[4663]: I1211 10:19:30.953788 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:30 crc kubenswrapper[4663]: I1211 10:19:30.953842 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:30 crc kubenswrapper[4663]: I1211 10:19:30.953852 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:30 crc kubenswrapper[4663]: I1211 10:19:30.953867 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:30 crc kubenswrapper[4663]: I1211 10:19:30.953878 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:30Z","lastTransitionTime":"2025-12-11T10:19:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:31 crc kubenswrapper[4663]: I1211 10:19:31.056220 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:31 crc kubenswrapper[4663]: I1211 10:19:31.056264 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:31 crc kubenswrapper[4663]: I1211 10:19:31.056275 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:31 crc kubenswrapper[4663]: I1211 10:19:31.056291 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:31 crc kubenswrapper[4663]: I1211 10:19:31.056303 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:31Z","lastTransitionTime":"2025-12-11T10:19:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:31 crc kubenswrapper[4663]: I1211 10:19:31.158450 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:31 crc kubenswrapper[4663]: I1211 10:19:31.158489 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:31 crc kubenswrapper[4663]: I1211 10:19:31.158497 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:31 crc kubenswrapper[4663]: I1211 10:19:31.158513 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:31 crc kubenswrapper[4663]: I1211 10:19:31.158523 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:31Z","lastTransitionTime":"2025-12-11T10:19:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:31 crc kubenswrapper[4663]: I1211 10:19:31.260694 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:31 crc kubenswrapper[4663]: I1211 10:19:31.260869 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:31 crc kubenswrapper[4663]: I1211 10:19:31.260933 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:31 crc kubenswrapper[4663]: I1211 10:19:31.261005 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:31 crc kubenswrapper[4663]: I1211 10:19:31.261072 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:31Z","lastTransitionTime":"2025-12-11T10:19:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 10:19:31 crc kubenswrapper[4663]: I1211 10:19:31.363704 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:31 crc kubenswrapper[4663]: I1211 10:19:31.363749 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:31 crc kubenswrapper[4663]: I1211 10:19:31.363759 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:31 crc kubenswrapper[4663]: I1211 10:19:31.363774 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:31 crc kubenswrapper[4663]: I1211 10:19:31.363783 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:31Z","lastTransitionTime":"2025-12-11T10:19:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:31 crc kubenswrapper[4663]: I1211 10:19:31.466282 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:31 crc kubenswrapper[4663]: I1211 10:19:31.466318 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:31 crc kubenswrapper[4663]: I1211 10:19:31.466329 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:31 crc kubenswrapper[4663]: I1211 10:19:31.466342 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:31 crc kubenswrapper[4663]: I1211 10:19:31.466352 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:31Z","lastTransitionTime":"2025-12-11T10:19:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:31 crc kubenswrapper[4663]: I1211 10:19:31.504664 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:19:31 crc kubenswrapper[4663]: I1211 10:19:31.504678 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:19:31 crc kubenswrapper[4663]: E1211 10:19:31.504829 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-5r5q6" podUID="54c64914-8d10-48bc-a755-1f7257400a32" Dec 11 10:19:31 crc kubenswrapper[4663]: E1211 10:19:31.505025 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 10:19:31 crc kubenswrapper[4663]: I1211 10:19:31.547513 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 10:19:31 crc kubenswrapper[4663]: I1211 10:19:31.547546 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 10:19:31 crc kubenswrapper[4663]: I1211 10:19:31.547554 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 10:19:31 crc kubenswrapper[4663]: I1211 10:19:31.547565 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 10:19:31 crc kubenswrapper[4663]: I1211 10:19:31.547573 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T10:19:31Z","lastTransitionTime":"2025-12-11T10:19:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 10:19:31 crc kubenswrapper[4663]: I1211 10:19:31.580694 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-56cnr"] Dec 11 10:19:31 crc kubenswrapper[4663]: I1211 10:19:31.581288 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-56cnr" Dec 11 10:19:31 crc kubenswrapper[4663]: I1211 10:19:31.583549 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 11 10:19:31 crc kubenswrapper[4663]: I1211 10:19:31.584580 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 11 10:19:31 crc kubenswrapper[4663]: I1211 10:19:31.584833 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 11 10:19:31 crc kubenswrapper[4663]: I1211 10:19:31.586330 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 11 10:19:31 crc kubenswrapper[4663]: I1211 10:19:31.671569 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/c671d764-c28b-48da-8c31-df700839fba1-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-56cnr\" (UID: \"c671d764-c28b-48da-8c31-df700839fba1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-56cnr" Dec 11 10:19:31 crc kubenswrapper[4663]: I1211 10:19:31.671614 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/c671d764-c28b-48da-8c31-df700839fba1-service-ca\") pod \"cluster-version-operator-5c965bbfc6-56cnr\" (UID: \"c671d764-c28b-48da-8c31-df700839fba1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-56cnr" Dec 11 10:19:31 crc kubenswrapper[4663]: I1211 10:19:31.671826 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c671d764-c28b-48da-8c31-df700839fba1-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-56cnr\" (UID: \"c671d764-c28b-48da-8c31-df700839fba1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-56cnr" Dec 11 10:19:31 crc kubenswrapper[4663]: I1211 10:19:31.671883 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/c671d764-c28b-48da-8c31-df700839fba1-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-56cnr\" (UID: \"c671d764-c28b-48da-8c31-df700839fba1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-56cnr" Dec 11 10:19:31 crc kubenswrapper[4663]: I1211 10:19:31.671901 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c671d764-c28b-48da-8c31-df700839fba1-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-56cnr\" (UID: \"c671d764-c28b-48da-8c31-df700839fba1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-56cnr" Dec 11 10:19:31 crc kubenswrapper[4663]: I1211 10:19:31.772555 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/c671d764-c28b-48da-8c31-df700839fba1-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-56cnr\" (UID: \"c671d764-c28b-48da-8c31-df700839fba1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-56cnr" Dec 11 
10:19:31 crc kubenswrapper[4663]: I1211 10:19:31.772598 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/c671d764-c28b-48da-8c31-df700839fba1-service-ca\") pod \"cluster-version-operator-5c965bbfc6-56cnr\" (UID: \"c671d764-c28b-48da-8c31-df700839fba1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-56cnr" Dec 11 10:19:31 crc kubenswrapper[4663]: I1211 10:19:31.772656 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c671d764-c28b-48da-8c31-df700839fba1-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-56cnr\" (UID: \"c671d764-c28b-48da-8c31-df700839fba1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-56cnr" Dec 11 10:19:31 crc kubenswrapper[4663]: I1211 10:19:31.772675 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/c671d764-c28b-48da-8c31-df700839fba1-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-56cnr\" (UID: \"c671d764-c28b-48da-8c31-df700839fba1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-56cnr" Dec 11 10:19:31 crc kubenswrapper[4663]: I1211 10:19:31.772690 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c671d764-c28b-48da-8c31-df700839fba1-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-56cnr\" (UID: \"c671d764-c28b-48da-8c31-df700839fba1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-56cnr" Dec 11 10:19:31 crc kubenswrapper[4663]: I1211 10:19:31.772935 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/c671d764-c28b-48da-8c31-df700839fba1-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-56cnr\" (UID: \"c671d764-c28b-48da-8c31-df700839fba1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-56cnr" Dec 11 10:19:31 crc kubenswrapper[4663]: I1211 10:19:31.773167 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/c671d764-c28b-48da-8c31-df700839fba1-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-56cnr\" (UID: \"c671d764-c28b-48da-8c31-df700839fba1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-56cnr" Dec 11 10:19:31 crc kubenswrapper[4663]: I1211 10:19:31.773681 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/c671d764-c28b-48da-8c31-df700839fba1-service-ca\") pod \"cluster-version-operator-5c965bbfc6-56cnr\" (UID: \"c671d764-c28b-48da-8c31-df700839fba1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-56cnr" Dec 11 10:19:31 crc kubenswrapper[4663]: I1211 10:19:31.778013 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c671d764-c28b-48da-8c31-df700839fba1-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-56cnr\" (UID: \"c671d764-c28b-48da-8c31-df700839fba1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-56cnr" Dec 11 10:19:31 crc kubenswrapper[4663]: I1211 10:19:31.787248 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" 
(UniqueName: \"kubernetes.io/projected/c671d764-c28b-48da-8c31-df700839fba1-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-56cnr\" (UID: \"c671d764-c28b-48da-8c31-df700839fba1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-56cnr" Dec 11 10:19:31 crc kubenswrapper[4663]: I1211 10:19:31.896272 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-56cnr" Dec 11 10:19:31 crc kubenswrapper[4663]: I1211 10:19:31.928471 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-56cnr" event={"ID":"c671d764-c28b-48da-8c31-df700839fba1","Type":"ContainerStarted","Data":"14d70ecbaecd4954b4ffa3ac42927b7fb0b40bf395a5bf8b4dd624e785e6a62e"} Dec 11 10:19:32 crc kubenswrapper[4663]: I1211 10:19:32.504349 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:19:32 crc kubenswrapper[4663]: I1211 10:19:32.504417 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:19:32 crc kubenswrapper[4663]: E1211 10:19:32.504468 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 10:19:32 crc kubenswrapper[4663]: E1211 10:19:32.504561 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 10:19:32 crc kubenswrapper[4663]: I1211 10:19:32.932404 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-56cnr" event={"ID":"c671d764-c28b-48da-8c31-df700839fba1","Type":"ContainerStarted","Data":"7fab3fd1e4261a45092f2c94c519d8eced6a789eebc073cf3ccd5bc055414dca"} Dec 11 10:19:32 crc kubenswrapper[4663]: I1211 10:19:32.945849 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-56cnr" podStartSLOduration=86.945831243 podStartE2EDuration="1m26.945831243s" podCreationTimestamp="2025-12-11 10:18:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:19:32.944832152 +0000 UTC m=+103.762761935" watchObservedRunningTime="2025-12-11 10:19:32.945831243 +0000 UTC m=+103.763761026" Dec 11 10:19:33 crc kubenswrapper[4663]: I1211 10:19:33.504294 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:19:33 crc kubenswrapper[4663]: I1211 10:19:33.504655 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:19:33 crc kubenswrapper[4663]: E1211 10:19:33.504887 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 10:19:33 crc kubenswrapper[4663]: E1211 10:19:33.504741 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5r5q6" podUID="54c64914-8d10-48bc-a755-1f7257400a32" Dec 11 10:19:33 crc kubenswrapper[4663]: I1211 10:19:33.517821 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Dec 11 10:19:34 crc kubenswrapper[4663]: I1211 10:19:34.504697 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:19:34 crc kubenswrapper[4663]: I1211 10:19:34.504808 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:19:34 crc kubenswrapper[4663]: E1211 10:19:34.504872 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 10:19:34 crc kubenswrapper[4663]: E1211 10:19:34.505032 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 10:19:35 crc kubenswrapper[4663]: I1211 10:19:35.504596 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:19:35 crc kubenswrapper[4663]: I1211 10:19:35.504613 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:19:35 crc kubenswrapper[4663]: E1211 10:19:35.504772 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-5r5q6" podUID="54c64914-8d10-48bc-a755-1f7257400a32" Dec 11 10:19:35 crc kubenswrapper[4663]: E1211 10:19:35.504874 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 10:19:36 crc kubenswrapper[4663]: I1211 10:19:36.504052 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:19:36 crc kubenswrapper[4663]: I1211 10:19:36.504087 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:19:36 crc kubenswrapper[4663]: E1211 10:19:36.504223 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 10:19:36 crc kubenswrapper[4663]: E1211 10:19:36.504331 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 10:19:37 crc kubenswrapper[4663]: I1211 10:19:37.504642 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:19:37 crc kubenswrapper[4663]: I1211 10:19:37.504721 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:19:37 crc kubenswrapper[4663]: E1211 10:19:37.504795 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5r5q6" podUID="54c64914-8d10-48bc-a755-1f7257400a32" Dec 11 10:19:37 crc kubenswrapper[4663]: E1211 10:19:37.505009 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 10:19:38 crc kubenswrapper[4663]: I1211 10:19:38.504506 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:19:38 crc kubenswrapper[4663]: I1211 10:19:38.504587 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:19:38 crc kubenswrapper[4663]: E1211 10:19:38.504651 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 10:19:38 crc kubenswrapper[4663]: E1211 10:19:38.504758 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 10:19:39 crc kubenswrapper[4663]: I1211 10:19:39.504685 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:19:39 crc kubenswrapper[4663]: I1211 10:19:39.504728 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:19:39 crc kubenswrapper[4663]: E1211 10:19:39.505711 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5r5q6" podUID="54c64914-8d10-48bc-a755-1f7257400a32" Dec 11 10:19:39 crc kubenswrapper[4663]: E1211 10:19:39.505895 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 10:19:39 crc kubenswrapper[4663]: I1211 10:19:39.526798 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=6.526787941 podStartE2EDuration="6.526787941s" podCreationTimestamp="2025-12-11 10:19:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:19:39.526364847 +0000 UTC m=+110.344294631" watchObservedRunningTime="2025-12-11 10:19:39.526787941 +0000 UTC m=+110.344717725" Dec 11 10:19:39 crc kubenswrapper[4663]: I1211 10:19:39.950984 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-qv7jl_43cbb02f-aa8f-41aa-bd0a-8042aca3002c/kube-multus/1.log" Dec 11 10:19:39 crc kubenswrapper[4663]: I1211 10:19:39.951627 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-qv7jl_43cbb02f-aa8f-41aa-bd0a-8042aca3002c/kube-multus/0.log" Dec 11 10:19:39 crc kubenswrapper[4663]: I1211 10:19:39.951663 4663 generic.go:334] "Generic (PLEG): container finished" podID="43cbb02f-aa8f-41aa-bd0a-8042aca3002c" containerID="60bc0fe12622361f71172b9c28087be13fe7c722952d22af2982c439d19c7a73" exitCode=1 Dec 11 10:19:39 crc kubenswrapper[4663]: I1211 10:19:39.951692 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-qv7jl" event={"ID":"43cbb02f-aa8f-41aa-bd0a-8042aca3002c","Type":"ContainerDied","Data":"60bc0fe12622361f71172b9c28087be13fe7c722952d22af2982c439d19c7a73"} Dec 11 10:19:39 crc kubenswrapper[4663]: I1211 10:19:39.951725 4663 scope.go:117] "RemoveContainer" containerID="fb85226c32cc03f4667a27f4596ce501400b323c88b347baab7574fe8f9e056c" Dec 11 10:19:39 crc kubenswrapper[4663]: I1211 10:19:39.952059 4663 scope.go:117] "RemoveContainer" containerID="60bc0fe12622361f71172b9c28087be13fe7c722952d22af2982c439d19c7a73" Dec 11 10:19:39 crc kubenswrapper[4663]: E1211 10:19:39.952228 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-qv7jl_openshift-multus(43cbb02f-aa8f-41aa-bd0a-8042aca3002c)\"" pod="openshift-multus/multus-qv7jl" podUID="43cbb02f-aa8f-41aa-bd0a-8042aca3002c" Dec 11 10:19:40 crc kubenswrapper[4663]: I1211 10:19:40.504397 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:19:40 crc kubenswrapper[4663]: E1211 10:19:40.504657 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 10:19:40 crc kubenswrapper[4663]: I1211 10:19:40.504488 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:19:40 crc kubenswrapper[4663]: E1211 10:19:40.504835 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 10:19:40 crc kubenswrapper[4663]: I1211 10:19:40.955506 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-qv7jl_43cbb02f-aa8f-41aa-bd0a-8042aca3002c/kube-multus/1.log" Dec 11 10:19:41 crc kubenswrapper[4663]: I1211 10:19:41.504669 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:19:41 crc kubenswrapper[4663]: I1211 10:19:41.504708 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:19:41 crc kubenswrapper[4663]: E1211 10:19:41.504791 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 10:19:41 crc kubenswrapper[4663]: E1211 10:19:41.504878 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5r5q6" podUID="54c64914-8d10-48bc-a755-1f7257400a32" Dec 11 10:19:42 crc kubenswrapper[4663]: I1211 10:19:42.503785 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:19:42 crc kubenswrapper[4663]: I1211 10:19:42.503826 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:19:42 crc kubenswrapper[4663]: E1211 10:19:42.503911 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 10:19:42 crc kubenswrapper[4663]: E1211 10:19:42.504059 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 10:19:43 crc kubenswrapper[4663]: I1211 10:19:43.504125 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:19:43 crc kubenswrapper[4663]: I1211 10:19:43.504136 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:19:43 crc kubenswrapper[4663]: E1211 10:19:43.504332 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 10:19:43 crc kubenswrapper[4663]: E1211 10:19:43.504417 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5r5q6" podUID="54c64914-8d10-48bc-a755-1f7257400a32" Dec 11 10:19:43 crc kubenswrapper[4663]: I1211 10:19:43.505205 4663 scope.go:117] "RemoveContainer" containerID="0963c6c1df39ea1ae72e428e60b76ab3c5677d293f2fc2dd2f19bf3807a0dd6e" Dec 11 10:19:43 crc kubenswrapper[4663]: I1211 10:19:43.964489 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-z7882_4f739f0f-4546-4db5-809d-435823288bd8/ovnkube-controller/3.log" Dec 11 10:19:43 crc kubenswrapper[4663]: I1211 10:19:43.966522 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" event={"ID":"4f739f0f-4546-4db5-809d-435823288bd8","Type":"ContainerStarted","Data":"e5e3fa7340c49bbb65191e085175c9957df030d106c3870d12c90958223a80d6"} Dec 11 10:19:43 crc kubenswrapper[4663]: I1211 10:19:43.966878 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:19:43 crc kubenswrapper[4663]: I1211 10:19:43.988817 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" podStartSLOduration=97.98879908 podStartE2EDuration="1m37.98879908s" podCreationTimestamp="2025-12-11 10:18:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:19:43.986612268 +0000 UTC m=+114.804542053" watchObservedRunningTime="2025-12-11 10:19:43.98879908 +0000 UTC m=+114.806728864" Dec 11 10:19:44 crc kubenswrapper[4663]: I1211 10:19:44.161414 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-5r5q6"] Dec 11 10:19:44 crc kubenswrapper[4663]: I1211 10:19:44.161544 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:19:44 crc kubenswrapper[4663]: E1211 10:19:44.161623 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5r5q6" podUID="54c64914-8d10-48bc-a755-1f7257400a32" Dec 11 10:19:44 crc kubenswrapper[4663]: I1211 10:19:44.503783 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:19:44 crc kubenswrapper[4663]: E1211 10:19:44.504052 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 10:19:44 crc kubenswrapper[4663]: I1211 10:19:44.503785 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:19:44 crc kubenswrapper[4663]: E1211 10:19:44.504169 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 10:19:45 crc kubenswrapper[4663]: I1211 10:19:45.504509 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:19:45 crc kubenswrapper[4663]: I1211 10:19:45.504615 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:19:45 crc kubenswrapper[4663]: E1211 10:19:45.504723 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5r5q6" podUID="54c64914-8d10-48bc-a755-1f7257400a32" Dec 11 10:19:45 crc kubenswrapper[4663]: E1211 10:19:45.504847 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 10:19:46 crc kubenswrapper[4663]: I1211 10:19:46.504279 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:19:46 crc kubenswrapper[4663]: I1211 10:19:46.504297 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:19:46 crc kubenswrapper[4663]: E1211 10:19:46.504436 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 10:19:46 crc kubenswrapper[4663]: E1211 10:19:46.504559 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 10:19:47 crc kubenswrapper[4663]: I1211 10:19:47.503782 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:19:47 crc kubenswrapper[4663]: I1211 10:19:47.503844 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:19:47 crc kubenswrapper[4663]: E1211 10:19:47.503940 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5r5q6" podUID="54c64914-8d10-48bc-a755-1f7257400a32" Dec 11 10:19:47 crc kubenswrapper[4663]: E1211 10:19:47.504110 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 10:19:48 crc kubenswrapper[4663]: I1211 10:19:48.503892 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:19:48 crc kubenswrapper[4663]: I1211 10:19:48.503928 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:19:48 crc kubenswrapper[4663]: E1211 10:19:48.504014 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 10:19:48 crc kubenswrapper[4663]: E1211 10:19:48.504093 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 10:19:49 crc kubenswrapper[4663]: E1211 10:19:49.473866 4663 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Dec 11 10:19:49 crc kubenswrapper[4663]: I1211 10:19:49.504476 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:19:49 crc kubenswrapper[4663]: I1211 10:19:49.504528 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:19:49 crc kubenswrapper[4663]: E1211 10:19:49.505364 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 10:19:49 crc kubenswrapper[4663]: E1211 10:19:49.505431 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5r5q6" podUID="54c64914-8d10-48bc-a755-1f7257400a32" Dec 11 10:19:49 crc kubenswrapper[4663]: E1211 10:19:49.579123 4663 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 11 10:19:50 crc kubenswrapper[4663]: I1211 10:19:50.503803 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:19:50 crc kubenswrapper[4663]: I1211 10:19:50.503906 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:19:50 crc kubenswrapper[4663]: E1211 10:19:50.503990 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 10:19:50 crc kubenswrapper[4663]: E1211 10:19:50.504093 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 10:19:51 crc kubenswrapper[4663]: I1211 10:19:51.504461 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:19:51 crc kubenswrapper[4663]: E1211 10:19:51.504596 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5r5q6" podUID="54c64914-8d10-48bc-a755-1f7257400a32" Dec 11 10:19:51 crc kubenswrapper[4663]: I1211 10:19:51.504883 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:19:51 crc kubenswrapper[4663]: E1211 10:19:51.504940 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 10:19:52 crc kubenswrapper[4663]: I1211 10:19:52.504307 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:19:52 crc kubenswrapper[4663]: I1211 10:19:52.504427 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:19:52 crc kubenswrapper[4663]: E1211 10:19:52.504469 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 10:19:52 crc kubenswrapper[4663]: E1211 10:19:52.504581 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 10:19:53 crc kubenswrapper[4663]: I1211 10:19:53.504336 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:19:53 crc kubenswrapper[4663]: I1211 10:19:53.504341 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:19:53 crc kubenswrapper[4663]: E1211 10:19:53.504495 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5r5q6" podUID="54c64914-8d10-48bc-a755-1f7257400a32" Dec 11 10:19:53 crc kubenswrapper[4663]: E1211 10:19:53.504597 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 10:19:54 crc kubenswrapper[4663]: I1211 10:19:54.504407 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:19:54 crc kubenswrapper[4663]: I1211 10:19:54.504407 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:19:54 crc kubenswrapper[4663]: E1211 10:19:54.504696 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 10:19:54 crc kubenswrapper[4663]: E1211 10:19:54.504524 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 10:19:54 crc kubenswrapper[4663]: I1211 10:19:54.505048 4663 scope.go:117] "RemoveContainer" containerID="60bc0fe12622361f71172b9c28087be13fe7c722952d22af2982c439d19c7a73" Dec 11 10:19:54 crc kubenswrapper[4663]: E1211 10:19:54.580720 4663 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
Dec 11 10:19:54 crc kubenswrapper[4663]: I1211 10:19:54.996961 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-qv7jl_43cbb02f-aa8f-41aa-bd0a-8042aca3002c/kube-multus/1.log" Dec 11 10:19:54 crc kubenswrapper[4663]: I1211 10:19:54.997015 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-qv7jl" event={"ID":"43cbb02f-aa8f-41aa-bd0a-8042aca3002c","Type":"ContainerStarted","Data":"bbef5c59739cbf708221920dc1d3eba9725c081535cabff905d9eddf68fda315"} Dec 11 10:19:55 crc kubenswrapper[4663]: I1211 10:19:55.504129 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:19:55 crc kubenswrapper[4663]: I1211 10:19:55.504255 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:19:55 crc kubenswrapper[4663]: E1211 10:19:55.504316 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5r5q6" podUID="54c64914-8d10-48bc-a755-1f7257400a32" Dec 11 10:19:55 crc kubenswrapper[4663]: E1211 10:19:55.504404 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 10:19:56 crc kubenswrapper[4663]: I1211 10:19:56.504440 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:19:56 crc kubenswrapper[4663]: I1211 10:19:56.504495 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:19:56 crc kubenswrapper[4663]: E1211 10:19:56.504629 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 10:19:56 crc kubenswrapper[4663]: E1211 10:19:56.504760 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 10:19:57 crc kubenswrapper[4663]: I1211 10:19:57.504666 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:19:57 crc kubenswrapper[4663]: I1211 10:19:57.504722 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:19:57 crc kubenswrapper[4663]: E1211 10:19:57.504884 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5r5q6" podUID="54c64914-8d10-48bc-a755-1f7257400a32" Dec 11 10:19:57 crc kubenswrapper[4663]: E1211 10:19:57.504984 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 10:19:58 crc kubenswrapper[4663]: I1211 10:19:58.504576 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:19:58 crc kubenswrapper[4663]: I1211 10:19:58.504622 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:19:58 crc kubenswrapper[4663]: E1211 10:19:58.504685 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 10:19:58 crc kubenswrapper[4663]: E1211 10:19:58.504825 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 10:19:59 crc kubenswrapper[4663]: I1211 10:19:59.504351 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:19:59 crc kubenswrapper[4663]: E1211 10:19:59.505504 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5r5q6" podUID="54c64914-8d10-48bc-a755-1f7257400a32" Dec 11 10:19:59 crc kubenswrapper[4663]: I1211 10:19:59.505596 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:19:59 crc kubenswrapper[4663]: E1211 10:19:59.505696 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 10:20:00 crc kubenswrapper[4663]: I1211 10:20:00.504480 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:20:00 crc kubenswrapper[4663]: I1211 10:20:00.504638 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:20:00 crc kubenswrapper[4663]: I1211 10:20:00.506534 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 11 10:20:00 crc kubenswrapper[4663]: I1211 10:20:00.506556 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 11 10:20:00 crc kubenswrapper[4663]: I1211 10:20:00.507224 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 11 10:20:00 crc kubenswrapper[4663]: I1211 10:20:00.507227 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 11 10:20:01 crc kubenswrapper[4663]: I1211 10:20:01.504437 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:20:01 crc kubenswrapper[4663]: I1211 10:20:01.504437 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:20:01 crc kubenswrapper[4663]: I1211 10:20:01.506708 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 11 10:20:01 crc kubenswrapper[4663]: I1211 10:20:01.507137 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.324744 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.325717 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.355441 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-n6dwq"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.355838 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-n6dwq" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.356053 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-hvrvb"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.356397 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-hvrvb" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.356622 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-9cvs5"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.356964 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-9cvs5" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.357217 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvj6k"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.357436 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvj6k" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.357778 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-xhhrv"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.358130 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-xhhrv" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.359977 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-26pzr"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.360296 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-26pzr" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.362027 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-cgckf"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.362295 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-cgckf" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.364895 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.365039 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.366354 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.366495 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.366682 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.369758 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.369809 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-cbp5t"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.370077 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-cbp5t" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.370164 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.370223 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.370376 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.370399 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.370472 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 11 10:20:02 crc kubenswrapper[4663]: W1211 10:20:02.370559 4663 reflector.go:561] object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2": failed to list *v1.Secret: secrets "route-controller-manager-sa-dockercfg-h2zr2" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-route-controller-manager": no relationship found between node 'crc' and this object Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.370571 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 11 10:20:02 crc kubenswrapper[4663]: E1211 10:20:02.370588 4663 reflector.go:158] "Unhandled Error" err="object-\"openshift-route-controller-manager\"/\"route-controller-manager-sa-dockercfg-h2zr2\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"route-controller-manager-sa-dockercfg-h2zr2\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the 
namespace \"openshift-route-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 11 10:20:02 crc kubenswrapper[4663]: W1211 10:20:02.370628 4663 reflector.go:561] object-"openshift-route-controller-manager"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-route-controller-manager": no relationship found between node 'crc' and this object Dec 11 10:20:02 crc kubenswrapper[4663]: E1211 10:20:02.370638 4663 reflector.go:158] "Unhandled Error" err="object-\"openshift-route-controller-manager\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-route-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.370744 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.370796 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.370821 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.370885 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 11 10:20:02 crc kubenswrapper[4663]: W1211 10:20:02.370899 4663 reflector.go:561] object-"openshift-route-controller-manager"/"config": failed to list *v1.ConfigMap: configmaps "config" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-route-controller-manager": no relationship found between node 'crc' and this object Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.370936 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.370942 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 11 10:20:02 crc kubenswrapper[4663]: W1211 10:20:02.370903 4663 reflector.go:561] object-"openshift-route-controller-manager"/"serving-cert": failed to list *v1.Secret: secrets "serving-cert" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-route-controller-manager": no relationship found between node 'crc' and this object Dec 11 10:20:02 crc kubenswrapper[4663]: E1211 10:20:02.370962 4663 reflector.go:158] "Unhandled Error" err="object-\"openshift-route-controller-manager\"/\"serving-cert\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"serving-cert\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-route-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.370959 4663 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 11 10:20:02 crc kubenswrapper[4663]: E1211 10:20:02.370940 4663 reflector.go:158] "Unhandled Error" err="object-\"openshift-route-controller-manager\"/\"config\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"config\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-route-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.370966 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.371034 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.371107 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.371115 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.371172 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.371237 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.371656 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.371673 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.371750 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.371802 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mzhff"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.372030 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.372230 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.372248 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mzhff" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.372389 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-g29lh"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.372496 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.372516 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 11 10:20:02 crc kubenswrapper[4663]: W1211 10:20:02.372566 4663 reflector.go:561] object-"openshift-route-controller-manager"/"client-ca": failed to list *v1.ConfigMap: configmaps "client-ca" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-route-controller-manager": no relationship found between node 'crc' and this object Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.373091 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.373221 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 11 10:20:02 crc kubenswrapper[4663]: E1211 10:20:02.374292 4663 reflector.go:158] "Unhandled Error" err="object-\"openshift-route-controller-manager\"/\"client-ca\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"client-ca\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-route-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.374411 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.374545 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.374672 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.375273 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.376335 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.377231 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.377820 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.377999 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.378209 4663 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.378823 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-mn2pm"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.379212 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-g29lh" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.379270 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-hfmnw"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.379841 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hfmnw" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.380232 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.380264 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-mn2pm" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.384351 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-x8ln7"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.384838 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.384924 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.401008 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.401237 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-kw96j"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.401525 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-kw96j" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.401764 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.401815 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.401900 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.401939 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.401762 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.402015 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-x8ln7" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.402543 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-dc8cp"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.402788 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-dc8cp" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.405010 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.405106 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.405260 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.405346 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.405605 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.406742 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-mxhbc"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.407281 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.408003 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.408080 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.409845 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.410571 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.411834 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-75lmn"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.411955 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.412335 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-n9pwg"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.412598 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-h4kjb"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.412915 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-h4kjb" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.413279 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.413334 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-75lmn" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.413472 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-n9pwg" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.415055 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-p968m"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.415446 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.415583 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.415634 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9kh7f"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.415950 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9kh7f" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.416009 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.416228 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.416282 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.416288 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.416590 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-p968m" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.417034 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xt8gz"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.417451 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-nn574"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.417729 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-nn574" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.417879 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xt8gz" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.418026 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-24p95"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.418399 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-24p95" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.419384 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-fdg9t"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.419867 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fdg9t" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.426416 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.426633 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.426793 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.427364 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.427451 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.428434 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.428510 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.428560 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.429243 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.429400 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.429631 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.429776 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.430034 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.430296 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.430438 4663 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-hvrvb"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.430604 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.431760 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.431906 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.432083 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.448550 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.448892 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.449007 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.449238 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.449324 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.449633 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.450235 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-rw7qg"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.450807 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.451130 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-rw7qg" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.451290 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-25wmw"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.451909 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-25wmw" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.452173 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.452199 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.454802 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.454958 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fwjxr"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.455668 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fwjxr" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.457936 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zwlfp"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.458435 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zwlfp" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.462223 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zzlmb"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.462836 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zzlmb" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.463630 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-cgckf\" (UID: \"a4cc9117-6286-44ba-a590-893cff694a7c\") " pod="openshift-authentication/oauth-openshift-558db77b4-cgckf" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.463665 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/26fa94f3-6f45-4641-a0b5-b027b7db54c3-etcd-client\") pod \"apiserver-76f77b778f-9cvs5\" (UID: \"26fa94f3-6f45-4641-a0b5-b027b7db54c3\") " pod="openshift-apiserver/apiserver-76f77b778f-9cvs5" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.463687 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7e2ab408-9ac3-4e29-aed5-42d77f94ad50-serving-cert\") pod \"authentication-operator-69f744f599-cbp5t\" (UID: \"7e2ab408-9ac3-4e29-aed5-42d77f94ad50\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-cbp5t" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.463703 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/2c2ceeb7-b3cc-4085-8591-0e0c42a01c34-encryption-config\") pod \"apiserver-7bbb656c7d-hfmnw\" (UID: \"2c2ceeb7-b3cc-4085-8591-0e0c42a01c34\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hfmnw" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.463721 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-cgckf\" (UID: \"a4cc9117-6286-44ba-a590-893cff694a7c\") " pod="openshift-authentication/oauth-openshift-558db77b4-cgckf" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.463741 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/b278aa37-9fc1-430d-9182-2dbf0f56b930-auth-proxy-config\") pod \"machine-approver-56656f9798-26pzr\" (UID: \"b278aa37-9fc1-430d-9182-2dbf0f56b930\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-26pzr" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.463777 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/26fa94f3-6f45-4641-a0b5-b027b7db54c3-audit\") pod \"apiserver-76f77b778f-9cvs5\" (UID: \"26fa94f3-6f45-4641-a0b5-b027b7db54c3\") " pod="openshift-apiserver/apiserver-76f77b778f-9cvs5" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.463794 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lhpvj\" (UniqueName: \"kubernetes.io/projected/b278aa37-9fc1-430d-9182-2dbf0f56b930-kube-api-access-lhpvj\") pod \"machine-approver-56656f9798-26pzr\" (UID: \"b278aa37-9fc1-430d-9182-2dbf0f56b930\") " 
pod="openshift-cluster-machine-approver/machine-approver-56656f9798-26pzr" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.463810 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-cgckf\" (UID: \"a4cc9117-6286-44ba-a590-893cff694a7c\") " pod="openshift-authentication/oauth-openshift-558db77b4-cgckf" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.463825 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ktlbb\" (UniqueName: \"kubernetes.io/projected/e3cf153e-0d08-489d-8d35-1170356831ee-kube-api-access-ktlbb\") pod \"openshift-config-operator-7777fb866f-hvrvb\" (UID: \"e3cf153e-0d08-489d-8d35-1170356831ee\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-hvrvb" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.463842 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/14f276e8-14d3-4e67-956d-ad6212f45c69-serving-cert\") pod \"controller-manager-879f6c89f-n6dwq\" (UID: \"14f276e8-14d3-4e67-956d-ad6212f45c69\") " pod="openshift-controller-manager/controller-manager-879f6c89f-n6dwq" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.463860 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/2c2ceeb7-b3cc-4085-8591-0e0c42a01c34-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-hfmnw\" (UID: \"2c2ceeb7-b3cc-4085-8591-0e0c42a01c34\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hfmnw" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.463878 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c7f23b6b-b7a1-46f4-b553-37be88f8fae0-config\") pod \"route-controller-manager-6576b87f9c-zvj6k\" (UID: \"c7f23b6b-b7a1-46f4-b553-37be88f8fae0\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvj6k" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.463892 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/1001a5ac-b44d-41c2-979f-a987c9c6d63f-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-g29lh\" (UID: \"1001a5ac-b44d-41c2-979f-a987c9c6d63f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-g29lh" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.463906 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/14c24a3b-a966-47ff-a122-50abb423e4b3-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-mzhff\" (UID: \"14c24a3b-a966-47ff-a122-50abb423e4b3\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mzhff" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.463920 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/2c2ceeb7-b3cc-4085-8591-0e0c42a01c34-etcd-client\") pod \"apiserver-7bbb656c7d-hfmnw\" (UID: 
\"2c2ceeb7-b3cc-4085-8591-0e0c42a01c34\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hfmnw" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.463937 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/99b2469b-932c-4a11-be99-ace72cff3d56-config\") pod \"console-operator-58897d9998-xhhrv\" (UID: \"99b2469b-932c-4a11-be99-ace72cff3d56\") " pod="openshift-console-operator/console-operator-58897d9998-xhhrv" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.463953 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jnlld\" (UniqueName: \"kubernetes.io/projected/46f7ebf5-734d-41f6-a8eb-1daf1dc0e014-kube-api-access-jnlld\") pod \"downloads-7954f5f757-mn2pm\" (UID: \"46f7ebf5-734d-41f6-a8eb-1daf1dc0e014\") " pod="openshift-console/downloads-7954f5f757-mn2pm" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.463967 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/14f276e8-14d3-4e67-956d-ad6212f45c69-client-ca\") pod \"controller-manager-879f6c89f-n6dwq\" (UID: \"14f276e8-14d3-4e67-956d-ad6212f45c69\") " pod="openshift-controller-manager/controller-manager-879f6c89f-n6dwq" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.463985 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-cgckf\" (UID: \"a4cc9117-6286-44ba-a590-893cff694a7c\") " pod="openshift-authentication/oauth-openshift-558db77b4-cgckf" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.464000 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-cgckf\" (UID: \"a4cc9117-6286-44ba-a590-893cff694a7c\") " pod="openshift-authentication/oauth-openshift-558db77b4-cgckf" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.464014 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7e2ab408-9ac3-4e29-aed5-42d77f94ad50-service-ca-bundle\") pod \"authentication-operator-69f744f599-cbp5t\" (UID: \"7e2ab408-9ac3-4e29-aed5-42d77f94ad50\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-cbp5t" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.464028 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2c2ceeb7-b3cc-4085-8591-0e0c42a01c34-serving-cert\") pod \"apiserver-7bbb656c7d-hfmnw\" (UID: \"2c2ceeb7-b3cc-4085-8591-0e0c42a01c34\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hfmnw" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.464042 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/26fa94f3-6f45-4641-a0b5-b027b7db54c3-etcd-serving-ca\") pod \"apiserver-76f77b778f-9cvs5\" (UID: \"26fa94f3-6f45-4641-a0b5-b027b7db54c3\") " 
pod="openshift-apiserver/apiserver-76f77b778f-9cvs5" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.464058 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x6rl2\" (UniqueName: \"kubernetes.io/projected/14f276e8-14d3-4e67-956d-ad6212f45c69-kube-api-access-x6rl2\") pod \"controller-manager-879f6c89f-n6dwq\" (UID: \"14f276e8-14d3-4e67-956d-ad6212f45c69\") " pod="openshift-controller-manager/controller-manager-879f6c89f-n6dwq" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.464075 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w9g9k\" (UniqueName: \"kubernetes.io/projected/a4cc9117-6286-44ba-a590-893cff694a7c-kube-api-access-w9g9k\") pod \"oauth-openshift-558db77b4-cgckf\" (UID: \"a4cc9117-6286-44ba-a590-893cff694a7c\") " pod="openshift-authentication/oauth-openshift-558db77b4-cgckf" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.464090 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c7f23b6b-b7a1-46f4-b553-37be88f8fae0-serving-cert\") pod \"route-controller-manager-6576b87f9c-zvj6k\" (UID: \"c7f23b6b-b7a1-46f4-b553-37be88f8fae0\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvj6k" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.464105 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a4cc9117-6286-44ba-a590-893cff694a7c-audit-dir\") pod \"oauth-openshift-558db77b4-cgckf\" (UID: \"a4cc9117-6286-44ba-a590-893cff694a7c\") " pod="openshift-authentication/oauth-openshift-558db77b4-cgckf" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.464151 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/99b2469b-932c-4a11-be99-ace72cff3d56-trusted-ca\") pod \"console-operator-58897d9998-xhhrv\" (UID: \"99b2469b-932c-4a11-be99-ace72cff3d56\") " pod="openshift-console-operator/console-operator-58897d9998-xhhrv" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.464168 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e2ab408-9ac3-4e29-aed5-42d77f94ad50-config\") pod \"authentication-operator-69f744f599-cbp5t\" (UID: \"7e2ab408-9ac3-4e29-aed5-42d77f94ad50\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-cbp5t" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.464198 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dztlb\" (UniqueName: \"kubernetes.io/projected/2c2ceeb7-b3cc-4085-8591-0e0c42a01c34-kube-api-access-dztlb\") pod \"apiserver-7bbb656c7d-hfmnw\" (UID: \"2c2ceeb7-b3cc-4085-8591-0e0c42a01c34\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hfmnw" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.464216 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-cgckf\" (UID: \"a4cc9117-6286-44ba-a590-893cff694a7c\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-cgckf" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.464232 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/26fa94f3-6f45-4641-a0b5-b027b7db54c3-trusted-ca-bundle\") pod \"apiserver-76f77b778f-9cvs5\" (UID: \"26fa94f3-6f45-4641-a0b5-b027b7db54c3\") " pod="openshift-apiserver/apiserver-76f77b778f-9cvs5" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.464259 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-cgckf\" (UID: \"a4cc9117-6286-44ba-a590-893cff694a7c\") " pod="openshift-authentication/oauth-openshift-558db77b4-cgckf" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.464275 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/14f276e8-14d3-4e67-956d-ad6212f45c69-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-n6dwq\" (UID: \"14f276e8-14d3-4e67-956d-ad6212f45c69\") " pod="openshift-controller-manager/controller-manager-879f6c89f-n6dwq" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.464316 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/26fa94f3-6f45-4641-a0b5-b027b7db54c3-image-import-ca\") pod \"apiserver-76f77b778f-9cvs5\" (UID: \"26fa94f3-6f45-4641-a0b5-b027b7db54c3\") " pod="openshift-apiserver/apiserver-76f77b778f-9cvs5" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.464331 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/26fa94f3-6f45-4641-a0b5-b027b7db54c3-serving-cert\") pod \"apiserver-76f77b778f-9cvs5\" (UID: \"26fa94f3-6f45-4641-a0b5-b027b7db54c3\") " pod="openshift-apiserver/apiserver-76f77b778f-9cvs5" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.464347 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jdpvq\" (UniqueName: \"kubernetes.io/projected/c7f23b6b-b7a1-46f4-b553-37be88f8fae0-kube-api-access-jdpvq\") pod \"route-controller-manager-6576b87f9c-zvj6k\" (UID: \"c7f23b6b-b7a1-46f4-b553-37be88f8fae0\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvj6k" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.464360 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/99b2469b-932c-4a11-be99-ace72cff3d56-serving-cert\") pod \"console-operator-58897d9998-xhhrv\" (UID: \"99b2469b-932c-4a11-be99-ace72cff3d56\") " pod="openshift-console-operator/console-operator-58897d9998-xhhrv" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.464374 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/26fa94f3-6f45-4641-a0b5-b027b7db54c3-audit-dir\") pod \"apiserver-76f77b778f-9cvs5\" (UID: \"26fa94f3-6f45-4641-a0b5-b027b7db54c3\") " pod="openshift-apiserver/apiserver-76f77b778f-9cvs5" Dec 11 10:20:02 crc 
kubenswrapper[4663]: I1211 10:20:02.464389 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/26fa94f3-6f45-4641-a0b5-b027b7db54c3-node-pullsecrets\") pod \"apiserver-76f77b778f-9cvs5\" (UID: \"26fa94f3-6f45-4641-a0b5-b027b7db54c3\") " pod="openshift-apiserver/apiserver-76f77b778f-9cvs5" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.464419 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-cgckf\" (UID: \"a4cc9117-6286-44ba-a590-893cff694a7c\") " pod="openshift-authentication/oauth-openshift-558db77b4-cgckf" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.464437 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e3cf153e-0d08-489d-8d35-1170356831ee-serving-cert\") pod \"openshift-config-operator-7777fb866f-hvrvb\" (UID: \"e3cf153e-0d08-489d-8d35-1170356831ee\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-hvrvb" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.464453 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/14f276e8-14d3-4e67-956d-ad6212f45c69-config\") pod \"controller-manager-879f6c89f-n6dwq\" (UID: \"14f276e8-14d3-4e67-956d-ad6212f45c69\") " pod="openshift-controller-manager/controller-manager-879f6c89f-n6dwq" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.464469 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/2c2ceeb7-b3cc-4085-8591-0e0c42a01c34-audit-policies\") pod \"apiserver-7bbb656c7d-hfmnw\" (UID: \"2c2ceeb7-b3cc-4085-8591-0e0c42a01c34\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hfmnw" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.464496 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2c2ceeb7-b3cc-4085-8591-0e0c42a01c34-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-hfmnw\" (UID: \"2c2ceeb7-b3cc-4085-8591-0e0c42a01c34\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hfmnw" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.464510 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/26fa94f3-6f45-4641-a0b5-b027b7db54c3-config\") pod \"apiserver-76f77b778f-9cvs5\" (UID: \"26fa94f3-6f45-4641-a0b5-b027b7db54c3\") " pod="openshift-apiserver/apiserver-76f77b778f-9cvs5" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.464524 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/26fa94f3-6f45-4641-a0b5-b027b7db54c3-encryption-config\") pod \"apiserver-76f77b778f-9cvs5\" (UID: \"26fa94f3-6f45-4641-a0b5-b027b7db54c3\") " pod="openshift-apiserver/apiserver-76f77b778f-9cvs5" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.464539 4663 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/b278aa37-9fc1-430d-9182-2dbf0f56b930-machine-approver-tls\") pod \"machine-approver-56656f9798-26pzr\" (UID: \"b278aa37-9fc1-430d-9182-2dbf0f56b930\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-26pzr" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.464558 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1001a5ac-b44d-41c2-979f-a987c9c6d63f-config\") pod \"machine-api-operator-5694c8668f-g29lh\" (UID: \"1001a5ac-b44d-41c2-979f-a987c9c6d63f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-g29lh" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.464582 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-cgckf\" (UID: \"a4cc9117-6286-44ba-a590-893cff694a7c\") " pod="openshift-authentication/oauth-openshift-558db77b4-cgckf" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.464597 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/e3cf153e-0d08-489d-8d35-1170356831ee-available-featuregates\") pod \"openshift-config-operator-7777fb866f-hvrvb\" (UID: \"e3cf153e-0d08-489d-8d35-1170356831ee\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-hvrvb" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.464613 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hf224\" (UniqueName: \"kubernetes.io/projected/99b2469b-932c-4a11-be99-ace72cff3d56-kube-api-access-hf224\") pod \"console-operator-58897d9998-xhhrv\" (UID: \"99b2469b-932c-4a11-be99-ace72cff3d56\") " pod="openshift-console-operator/console-operator-58897d9998-xhhrv" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.464627 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l5vgk\" (UniqueName: \"kubernetes.io/projected/7e2ab408-9ac3-4e29-aed5-42d77f94ad50-kube-api-access-l5vgk\") pod \"authentication-operator-69f744f599-cbp5t\" (UID: \"7e2ab408-9ac3-4e29-aed5-42d77f94ad50\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-cbp5t" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.464643 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-cgckf\" (UID: \"a4cc9117-6286-44ba-a590-893cff694a7c\") " pod="openshift-authentication/oauth-openshift-558db77b4-cgckf" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.464658 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/1001a5ac-b44d-41c2-979f-a987c9c6d63f-images\") pod \"machine-api-operator-5694c8668f-g29lh\" (UID: \"1001a5ac-b44d-41c2-979f-a987c9c6d63f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-g29lh" Dec 11 10:20:02 crc 
kubenswrapper[4663]: I1211 10:20:02.464673 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7e2ab408-9ac3-4e29-aed5-42d77f94ad50-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-cbp5t\" (UID: \"7e2ab408-9ac3-4e29-aed5-42d77f94ad50\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-cbp5t" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.464688 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z9bdr\" (UniqueName: \"kubernetes.io/projected/26fa94f3-6f45-4641-a0b5-b027b7db54c3-kube-api-access-z9bdr\") pod \"apiserver-76f77b778f-9cvs5\" (UID: \"26fa94f3-6f45-4641-a0b5-b027b7db54c3\") " pod="openshift-apiserver/apiserver-76f77b778f-9cvs5" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.464702 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b278aa37-9fc1-430d-9182-2dbf0f56b930-config\") pod \"machine-approver-56656f9798-26pzr\" (UID: \"b278aa37-9fc1-430d-9182-2dbf0f56b930\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-26pzr" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.464716 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fslg7\" (UniqueName: \"kubernetes.io/projected/14c24a3b-a966-47ff-a122-50abb423e4b3-kube-api-access-fslg7\") pod \"cluster-samples-operator-665b6dd947-mzhff\" (UID: \"14c24a3b-a966-47ff-a122-50abb423e4b3\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mzhff" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.464737 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-cgckf\" (UID: \"a4cc9117-6286-44ba-a590-893cff694a7c\") " pod="openshift-authentication/oauth-openshift-558db77b4-cgckf" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.464751 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2c2ceeb7-b3cc-4085-8591-0e0c42a01c34-audit-dir\") pod \"apiserver-7bbb656c7d-hfmnw\" (UID: \"2c2ceeb7-b3cc-4085-8591-0e0c42a01c34\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hfmnw" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.465094 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a4cc9117-6286-44ba-a590-893cff694a7c-audit-policies\") pod \"oauth-openshift-558db77b4-cgckf\" (UID: \"a4cc9117-6286-44ba-a590-893cff694a7c\") " pod="openshift-authentication/oauth-openshift-558db77b4-cgckf" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.465120 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c7f23b6b-b7a1-46f4-b553-37be88f8fae0-client-ca\") pod \"route-controller-manager-6576b87f9c-zvj6k\" (UID: \"c7f23b6b-b7a1-46f4-b553-37be88f8fae0\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvj6k" Dec 11 10:20:02 crc 
kubenswrapper[4663]: I1211 10:20:02.465151 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5jgl4\" (UniqueName: \"kubernetes.io/projected/1001a5ac-b44d-41c2-979f-a987c9c6d63f-kube-api-access-5jgl4\") pod \"machine-api-operator-5694c8668f-g29lh\" (UID: \"1001a5ac-b44d-41c2-979f-a987c9c6d63f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-g29lh" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.465500 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-pnbrx"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.466081 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-pnbrx" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.467149 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-cb2kn"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.467639 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-bf8b4"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.467972 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-bf8b4" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.468170 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-cb2kn" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.468702 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.472526 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-jnb74"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.473077 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-8mrgz"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.473461 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-8mrgz" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.473683 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-jnb74" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.474365 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424135-v2t6r"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.474682 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424135-v2t6r" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.480230 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvj6k"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.487322 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.487475 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-nfh76"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.488020 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zxbz5"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.488031 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-nfh76" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.488949 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zxbz5" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.489519 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-n6dwq"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.490443 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-9cvs5"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.491299 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-vsdq4"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.491964 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-vsdq4" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.492079 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-75lmn"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.493839 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-x8ln7"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.495009 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-g29lh"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.496153 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-dc8cp"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.496596 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-25wmw"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.497483 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-p968m"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.506128 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zzlmb"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.506850 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.508766 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-hfmnw"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.510262 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-24p95"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.511254 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-nn574"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.512840 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-xhhrv"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.514854 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-cgckf"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.518701 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-9nsfq"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.519674 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xt8gz"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.519774 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-9nsfq" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.520798 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-pnbrx"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.522442 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zwlfp"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.522740 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-cbp5t"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.524074 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-h4kjb"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.525289 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9kh7f"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.526272 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fwjxr"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.526589 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.527091 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zxbz5"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.527966 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mzhff"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.530050 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-bf8b4"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.530825 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-mn2pm"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.531716 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-fdg9t"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.532594 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-mxhbc"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.533772 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-kw96j"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.534552 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-rw7qg"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.535202 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-vm8rp"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.536014 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-vm8rp" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.536323 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-vsdq4"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.537891 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-bnmt9"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.538365 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-bnmt9" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.538760 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-jnb74"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.540027 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-cb2kn"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.540906 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424135-v2t6r"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.541981 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-bnmt9"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.543073 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-8mrgz"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.544049 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-9nsfq"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.544887 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-vm8rp"] Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.546040 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.566608 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.566759 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c7f23b6b-b7a1-46f4-b553-37be88f8fae0-serving-cert\") pod \"route-controller-manager-6576b87f9c-zvj6k\" (UID: \"c7f23b6b-b7a1-46f4-b553-37be88f8fae0\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvj6k" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.566804 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/9813fa82-894c-4380-8975-24771adf79e7-profile-collector-cert\") pod \"catalog-operator-68c6474976-zxbz5\" (UID: \"9813fa82-894c-4380-8975-24771adf79e7\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zxbz5" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.566831 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2bb618f2-adb1-40ff-a336-2520519233b7-config\") pod \"openshift-apiserver-operator-796bbdcf4f-x8ln7\" (UID: \"2bb618f2-adb1-40ff-a336-2520519233b7\") " 
pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-x8ln7" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.566850 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/d18be0a7-2aaa-4aaa-9d37-f44453e994d8-console-oauth-config\") pod \"console-f9d7485db-dc8cp\" (UID: \"d18be0a7-2aaa-4aaa-9d37-f44453e994d8\") " pod="openshift-console/console-f9d7485db-dc8cp" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.566869 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5ggzg\" (UniqueName: \"kubernetes.io/projected/507a4649-4e86-435b-8ceb-9587a90bc48c-kube-api-access-5ggzg\") pod \"packageserver-d55dfcdfc-25wmw\" (UID: \"507a4649-4e86-435b-8ceb-9587a90bc48c\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-25wmw" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.566892 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/99b2469b-932c-4a11-be99-ace72cff3d56-trusted-ca\") pod \"console-operator-58897d9998-xhhrv\" (UID: \"99b2469b-932c-4a11-be99-ace72cff3d56\") " pod="openshift-console-operator/console-operator-58897d9998-xhhrv" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.566913 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/05009b2f-f136-4f9d-8d58-169816bb1ea6-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-24p95\" (UID: \"05009b2f-f136-4f9d-8d58-169816bb1ea6\") " pod="openshift-marketplace/marketplace-operator-79b997595-24p95" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.566964 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d18be0a7-2aaa-4aaa-9d37-f44453e994d8-trusted-ca-bundle\") pod \"console-f9d7485db-dc8cp\" (UID: \"d18be0a7-2aaa-4aaa-9d37-f44453e994d8\") " pod="openshift-console/console-f9d7485db-dc8cp" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.566994 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/1fc12aa7-09ac-4720-b32e-920301ca04ee-images\") pod \"machine-config-operator-74547568cd-fdg9t\" (UID: \"1fc12aa7-09ac-4720-b32e-920301ca04ee\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fdg9t" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.567024 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2a76c011-1596-4893-b01f-abf8e6e5295c-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-9kh7f\" (UID: \"2a76c011-1596-4893-b01f-abf8e6e5295c\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9kh7f" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.567065 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fc42c87c-f305-40f2-8d38-c9ef45c05bd0-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-nn574\" (UID: \"fc42c87c-f305-40f2-8d38-c9ef45c05bd0\") " 
pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-nn574" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.567095 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3885a18d-c5e3-484e-8950-878e57b1bb43-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-zwlfp\" (UID: \"3885a18d-c5e3-484e-8950-878e57b1bb43\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zwlfp" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.567119 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c2txq\" (UniqueName: \"kubernetes.io/projected/3885a18d-c5e3-484e-8950-878e57b1bb43-kube-api-access-c2txq\") pod \"kube-storage-version-migrator-operator-b67b599dd-zwlfp\" (UID: \"3885a18d-c5e3-484e-8950-878e57b1bb43\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zwlfp" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.567163 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/26fa94f3-6f45-4641-a0b5-b027b7db54c3-serving-cert\") pod \"apiserver-76f77b778f-9cvs5\" (UID: \"26fa94f3-6f45-4641-a0b5-b027b7db54c3\") " pod="openshift-apiserver/apiserver-76f77b778f-9cvs5" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.567199 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/83732716-a7c1-4be4-a73a-1d7a9633af29-etcd-client\") pod \"etcd-operator-b45778765-75lmn\" (UID: \"83732716-a7c1-4be4-a73a-1d7a9633af29\") " pod="openshift-etcd-operator/etcd-operator-b45778765-75lmn" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.567218 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/58945b3f-8314-4251-a1e1-20dff39bf316-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-kw96j\" (UID: \"58945b3f-8314-4251-a1e1-20dff39bf316\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-kw96j" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.567236 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-746jm\" (UniqueName: \"kubernetes.io/projected/d87db89f-40fd-4e1c-a959-e167edef0c19-kube-api-access-746jm\") pod \"service-ca-9c57cc56f-bf8b4\" (UID: \"d87db89f-40fd-4e1c-a959-e167edef0c19\") " pod="openshift-service-ca/service-ca-9c57cc56f-bf8b4" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.567354 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/26fa94f3-6f45-4641-a0b5-b027b7db54c3-audit-dir\") pod \"apiserver-76f77b778f-9cvs5\" (UID: \"26fa94f3-6f45-4641-a0b5-b027b7db54c3\") " pod="openshift-apiserver/apiserver-76f77b778f-9cvs5" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.567393 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/26fa94f3-6f45-4641-a0b5-b027b7db54c3-audit-dir\") pod \"apiserver-76f77b778f-9cvs5\" (UID: \"26fa94f3-6f45-4641-a0b5-b027b7db54c3\") " pod="openshift-apiserver/apiserver-76f77b778f-9cvs5" Dec 11 
10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.567414 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc42c87c-f305-40f2-8d38-c9ef45c05bd0-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-nn574\" (UID: \"fc42c87c-f305-40f2-8d38-c9ef45c05bd0\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-nn574" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.567440 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xb77h\" (UniqueName: \"kubernetes.io/projected/3a676c5e-7428-4f37-9aa2-0755611c6ff3-kube-api-access-xb77h\") pod \"router-default-5444994796-n9pwg\" (UID: \"3a676c5e-7428-4f37-9aa2-0755611c6ff3\") " pod="openshift-ingress/router-default-5444994796-n9pwg" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.567461 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/b16defcf-1335-4210-a647-bcaef5e0642f-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-pnbrx\" (UID: \"b16defcf-1335-4210-a647-bcaef5e0642f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-pnbrx" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.567502 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-cgckf\" (UID: \"a4cc9117-6286-44ba-a590-893cff694a7c\") " pod="openshift-authentication/oauth-openshift-558db77b4-cgckf" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.567536 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2c2ceeb7-b3cc-4085-8591-0e0c42a01c34-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-hfmnw\" (UID: \"2c2ceeb7-b3cc-4085-8591-0e0c42a01c34\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hfmnw" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.567555 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/3a676c5e-7428-4f37-9aa2-0755611c6ff3-stats-auth\") pod \"router-default-5444994796-n9pwg\" (UID: \"3a676c5e-7428-4f37-9aa2-0755611c6ff3\") " pod="openshift-ingress/router-default-5444994796-n9pwg" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.567575 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/14f276e8-14d3-4e67-956d-ad6212f45c69-config\") pod \"controller-manager-879f6c89f-n6dwq\" (UID: \"14f276e8-14d3-4e67-956d-ad6212f45c69\") " pod="openshift-controller-manager/controller-manager-879f6c89f-n6dwq" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.567598 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/d18be0a7-2aaa-4aaa-9d37-f44453e994d8-console-serving-cert\") pod \"console-f9d7485db-dc8cp\" (UID: \"d18be0a7-2aaa-4aaa-9d37-f44453e994d8\") " pod="openshift-console/console-f9d7485db-dc8cp" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.567616 4663 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ght7c\" (UniqueName: \"kubernetes.io/projected/32a7449e-b310-406a-b03a-4e2e461d11ae-kube-api-access-ght7c\") pod \"control-plane-machine-set-operator-78cbb6b69f-fwjxr\" (UID: \"32a7449e-b310-406a-b03a-4e2e461d11ae\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fwjxr" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.567633 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/26fa94f3-6f45-4641-a0b5-b027b7db54c3-encryption-config\") pod \"apiserver-76f77b778f-9cvs5\" (UID: \"26fa94f3-6f45-4641-a0b5-b027b7db54c3\") " pod="openshift-apiserver/apiserver-76f77b778f-9cvs5" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.567651 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/b278aa37-9fc1-430d-9182-2dbf0f56b930-machine-approver-tls\") pod \"machine-approver-56656f9798-26pzr\" (UID: \"b278aa37-9fc1-430d-9182-2dbf0f56b930\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-26pzr" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.567671 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6pvvg\" (UniqueName: \"kubernetes.io/projected/9813fa82-894c-4380-8975-24771adf79e7-kube-api-access-6pvvg\") pod \"catalog-operator-68c6474976-zxbz5\" (UID: \"9813fa82-894c-4380-8975-24771adf79e7\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zxbz5" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.567692 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/e3cf153e-0d08-489d-8d35-1170356831ee-available-featuregates\") pod \"openshift-config-operator-7777fb866f-hvrvb\" (UID: \"e3cf153e-0d08-489d-8d35-1170356831ee\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-hvrvb" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.567709 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1001a5ac-b44d-41c2-979f-a987c9c6d63f-config\") pod \"machine-api-operator-5694c8668f-g29lh\" (UID: \"1001a5ac-b44d-41c2-979f-a987c9c6d63f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-g29lh" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.567728 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-cgckf\" (UID: \"a4cc9117-6286-44ba-a590-893cff694a7c\") " pod="openshift-authentication/oauth-openshift-558db77b4-cgckf" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.567747 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/9813fa82-894c-4380-8975-24771adf79e7-srv-cert\") pod \"catalog-operator-68c6474976-zxbz5\" (UID: \"9813fa82-894c-4380-8975-24771adf79e7\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zxbz5" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.567763 4663 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f64a0f1e-3264-4862-ad3b-097bf139855c-srv-cert\") pod \"olm-operator-6b444d44fb-zzlmb\" (UID: \"f64a0f1e-3264-4862-ad3b-097bf139855c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zzlmb" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.567782 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hf224\" (UniqueName: \"kubernetes.io/projected/99b2469b-932c-4a11-be99-ace72cff3d56-kube-api-access-hf224\") pod \"console-operator-58897d9998-xhhrv\" (UID: \"99b2469b-932c-4a11-be99-ace72cff3d56\") " pod="openshift-console-operator/console-operator-58897d9998-xhhrv" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.567796 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/d18be0a7-2aaa-4aaa-9d37-f44453e994d8-oauth-serving-cert\") pod \"console-f9d7485db-dc8cp\" (UID: \"d18be0a7-2aaa-4aaa-9d37-f44453e994d8\") " pod="openshift-console/console-f9d7485db-dc8cp" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.567813 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/d87db89f-40fd-4e1c-a959-e167edef0c19-signing-cabundle\") pod \"service-ca-9c57cc56f-bf8b4\" (UID: \"d87db89f-40fd-4e1c-a959-e167edef0c19\") " pod="openshift-service-ca/service-ca-9c57cc56f-bf8b4" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.567828 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/1001a5ac-b44d-41c2-979f-a987c9c6d63f-images\") pod \"machine-api-operator-5694c8668f-g29lh\" (UID: \"1001a5ac-b44d-41c2-979f-a987c9c6d63f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-g29lh" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.567844 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/83732716-a7c1-4be4-a73a-1d7a9633af29-etcd-ca\") pod \"etcd-operator-b45778765-75lmn\" (UID: \"83732716-a7c1-4be4-a73a-1d7a9633af29\") " pod="openshift-etcd-operator/etcd-operator-b45778765-75lmn" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.567860 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dwbrw\" (UniqueName: \"kubernetes.io/projected/b15591c3-541e-4daf-8e21-9b69db5cf260-kube-api-access-dwbrw\") pod \"collect-profiles-29424135-v2t6r\" (UID: \"b15591c3-541e-4daf-8e21-9b69db5cf260\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424135-v2t6r" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.567872 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/99b2469b-932c-4a11-be99-ace72cff3d56-trusted-ca\") pod \"console-operator-58897d9998-xhhrv\" (UID: \"99b2469b-932c-4a11-be99-ace72cff3d56\") " pod="openshift-console-operator/console-operator-58897d9998-xhhrv" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.567875 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q4fd2\" (UniqueName: \"kubernetes.io/projected/58945b3f-8314-4251-a1e1-20dff39bf316-kube-api-access-q4fd2\") pod 
\"cluster-image-registry-operator-dc59b4c8b-kw96j\" (UID: \"58945b3f-8314-4251-a1e1-20dff39bf316\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-kw96j" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.568008 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a35ea846-6e87-40fe-8998-037fb7bd2060-serving-cert\") pod \"service-ca-operator-777779d784-8mrgz\" (UID: \"a35ea846-6e87-40fe-8998-037fb7bd2060\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-8mrgz" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.568032 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-st982\" (UniqueName: \"kubernetes.io/projected/83732716-a7c1-4be4-a73a-1d7a9633af29-kube-api-access-st982\") pod \"etcd-operator-b45778765-75lmn\" (UID: \"83732716-a7c1-4be4-a73a-1d7a9633af29\") " pod="openshift-etcd-operator/etcd-operator-b45778765-75lmn" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.568053 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7e2ab408-9ac3-4e29-aed5-42d77f94ad50-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-cbp5t\" (UID: \"7e2ab408-9ac3-4e29-aed5-42d77f94ad50\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-cbp5t" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.568070 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2bb618f2-adb1-40ff-a336-2520519233b7-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-x8ln7\" (UID: \"2bb618f2-adb1-40ff-a336-2520519233b7\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-x8ln7" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.568088 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z9bdr\" (UniqueName: \"kubernetes.io/projected/26fa94f3-6f45-4641-a0b5-b027b7db54c3-kube-api-access-z9bdr\") pod \"apiserver-76f77b778f-9cvs5\" (UID: \"26fa94f3-6f45-4641-a0b5-b027b7db54c3\") " pod="openshift-apiserver/apiserver-76f77b778f-9cvs5" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.568108 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b278aa37-9fc1-430d-9182-2dbf0f56b930-config\") pod \"machine-approver-56656f9798-26pzr\" (UID: \"b278aa37-9fc1-430d-9182-2dbf0f56b930\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-26pzr" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.568124 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2c2ceeb7-b3cc-4085-8591-0e0c42a01c34-audit-dir\") pod \"apiserver-7bbb656c7d-hfmnw\" (UID: \"2c2ceeb7-b3cc-4085-8591-0e0c42a01c34\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hfmnw" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.568158 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bhrrz\" (UniqueName: \"kubernetes.io/projected/10c2efee-1b70-4258-92f0-caba3a5cfd50-kube-api-access-bhrrz\") pod \"package-server-manager-789f6589d5-jnb74\" (UID: \"10c2efee-1b70-4258-92f0-caba3a5cfd50\") " 
pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-jnb74" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.568196 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1fc12aa7-09ac-4720-b32e-920301ca04ee-auth-proxy-config\") pod \"machine-config-operator-74547568cd-fdg9t\" (UID: \"1fc12aa7-09ac-4720-b32e-920301ca04ee\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fdg9t" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.568218 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-cgckf\" (UID: \"a4cc9117-6286-44ba-a590-893cff694a7c\") " pod="openshift-authentication/oauth-openshift-558db77b4-cgckf" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.568462 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2c2ceeb7-b3cc-4085-8591-0e0c42a01c34-audit-dir\") pod \"apiserver-7bbb656c7d-hfmnw\" (UID: \"2c2ceeb7-b3cc-4085-8591-0e0c42a01c34\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hfmnw" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.568623 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/e3cf153e-0d08-489d-8d35-1170356831ee-available-featuregates\") pod \"openshift-config-operator-7777fb866f-hvrvb\" (UID: \"e3cf153e-0d08-489d-8d35-1170356831ee\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-hvrvb" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.568965 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b278aa37-9fc1-430d-9182-2dbf0f56b930-config\") pod \"machine-approver-56656f9798-26pzr\" (UID: \"b278aa37-9fc1-430d-9182-2dbf0f56b930\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-26pzr" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.569136 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/14f276e8-14d3-4e67-956d-ad6212f45c69-config\") pod \"controller-manager-879f6c89f-n6dwq\" (UID: \"14f276e8-14d3-4e67-956d-ad6212f45c69\") " pod="openshift-controller-manager/controller-manager-879f6c89f-n6dwq" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.569387 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/1001a5ac-b44d-41c2-979f-a987c9c6d63f-images\") pod \"machine-api-operator-5694c8668f-g29lh\" (UID: \"1001a5ac-b44d-41c2-979f-a987c9c6d63f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-g29lh" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.569514 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7e2ab408-9ac3-4e29-aed5-42d77f94ad50-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-cbp5t\" (UID: \"7e2ab408-9ac3-4e29-aed5-42d77f94ad50\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-cbp5t" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.569589 4663 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-cgckf\" (UID: \"a4cc9117-6286-44ba-a590-893cff694a7c\") " pod="openshift-authentication/oauth-openshift-558db77b4-cgckf" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.569836 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1001a5ac-b44d-41c2-979f-a987c9c6d63f-config\") pod \"machine-api-operator-5694c8668f-g29lh\" (UID: \"1001a5ac-b44d-41c2-979f-a987c9c6d63f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-g29lh" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.569872 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2c2ceeb7-b3cc-4085-8591-0e0c42a01c34-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-hfmnw\" (UID: \"2c2ceeb7-b3cc-4085-8591-0e0c42a01c34\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hfmnw" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.568239 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f64a0f1e-3264-4862-ad3b-097bf139855c-profile-collector-cert\") pod \"olm-operator-6b444d44fb-zzlmb\" (UID: \"f64a0f1e-3264-4862-ad3b-097bf139855c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zzlmb" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.570473 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a4cc9117-6286-44ba-a590-893cff694a7c-audit-policies\") pod \"oauth-openshift-558db77b4-cgckf\" (UID: \"a4cc9117-6286-44ba-a590-893cff694a7c\") " pod="openshift-authentication/oauth-openshift-558db77b4-cgckf" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.570500 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/58945b3f-8314-4251-a1e1-20dff39bf316-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-kw96j\" (UID: \"58945b3f-8314-4251-a1e1-20dff39bf316\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-kw96j" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.570516 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/83732716-a7c1-4be4-a73a-1d7a9633af29-serving-cert\") pod \"etcd-operator-b45778765-75lmn\" (UID: \"83732716-a7c1-4be4-a73a-1d7a9633af29\") " pod="openshift-etcd-operator/etcd-operator-b45778765-75lmn" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.570532 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2a76c011-1596-4893-b01f-abf8e6e5295c-config\") pod \"kube-apiserver-operator-766d6c64bb-9kh7f\" (UID: \"2a76c011-1596-4893-b01f-abf8e6e5295c\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9kh7f" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.570550 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-js874\" (UniqueName: 
\"kubernetes.io/projected/a35ea846-6e87-40fe-8998-037fb7bd2060-kube-api-access-js874\") pod \"service-ca-operator-777779d784-8mrgz\" (UID: \"a35ea846-6e87-40fe-8998-037fb7bd2060\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-8mrgz" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.570567 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/44eee7e7-6cdd-40bf-a998-ed2c4ae7884a-trusted-ca\") pod \"ingress-operator-5b745b69d9-p968m\" (UID: \"44eee7e7-6cdd-40bf-a998-ed2c4ae7884a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-p968m" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.570587 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/26fa94f3-6f45-4641-a0b5-b027b7db54c3-audit\") pod \"apiserver-76f77b778f-9cvs5\" (UID: \"26fa94f3-6f45-4641-a0b5-b027b7db54c3\") " pod="openshift-apiserver/apiserver-76f77b778f-9cvs5" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.570604 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/58945b3f-8314-4251-a1e1-20dff39bf316-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-kw96j\" (UID: \"58945b3f-8314-4251-a1e1-20dff39bf316\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-kw96j" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.570623 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/1fc12aa7-09ac-4720-b32e-920301ca04ee-proxy-tls\") pod \"machine-config-operator-74547568cd-fdg9t\" (UID: \"1fc12aa7-09ac-4720-b32e-920301ca04ee\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fdg9t" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.570641 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/83732716-a7c1-4be4-a73a-1d7a9633af29-etcd-service-ca\") pod \"etcd-operator-b45778765-75lmn\" (UID: \"83732716-a7c1-4be4-a73a-1d7a9633af29\") " pod="openshift-etcd-operator/etcd-operator-b45778765-75lmn" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.570657 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/2c2ceeb7-b3cc-4085-8591-0e0c42a01c34-etcd-client\") pod \"apiserver-7bbb656c7d-hfmnw\" (UID: \"2c2ceeb7-b3cc-4085-8591-0e0c42a01c34\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hfmnw" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.570672 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b15591c3-541e-4daf-8e21-9b69db5cf260-config-volume\") pod \"collect-profiles-29424135-v2t6r\" (UID: \"b15591c3-541e-4daf-8e21-9b69db5cf260\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424135-v2t6r" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.570689 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/14f276e8-14d3-4e67-956d-ad6212f45c69-client-ca\") pod \"controller-manager-879f6c89f-n6dwq\" (UID: 
\"14f276e8-14d3-4e67-956d-ad6212f45c69\") " pod="openshift-controller-manager/controller-manager-879f6c89f-n6dwq" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.570707 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/507a4649-4e86-435b-8ceb-9587a90bc48c-tmpfs\") pod \"packageserver-d55dfcdfc-25wmw\" (UID: \"507a4649-4e86-435b-8ceb-9587a90bc48c\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-25wmw" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.570723 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7e2ab408-9ac3-4e29-aed5-42d77f94ad50-service-ca-bundle\") pod \"authentication-operator-69f744f599-cbp5t\" (UID: \"7e2ab408-9ac3-4e29-aed5-42d77f94ad50\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-cbp5t" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.570738 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-skwwp\" (UniqueName: \"kubernetes.io/projected/d18be0a7-2aaa-4aaa-9d37-f44453e994d8-kube-api-access-skwwp\") pod \"console-f9d7485db-dc8cp\" (UID: \"d18be0a7-2aaa-4aaa-9d37-f44453e994d8\") " pod="openshift-console/console-f9d7485db-dc8cp" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.570753 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/32a7449e-b310-406a-b03a-4e2e461d11ae-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-fwjxr\" (UID: \"32a7449e-b310-406a-b03a-4e2e461d11ae\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fwjxr" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.570770 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/26fa94f3-6f45-4641-a0b5-b027b7db54c3-etcd-serving-ca\") pod \"apiserver-76f77b778f-9cvs5\" (UID: \"26fa94f3-6f45-4641-a0b5-b027b7db54c3\") " pod="openshift-apiserver/apiserver-76f77b778f-9cvs5" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.570786 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x6rl2\" (UniqueName: \"kubernetes.io/projected/14f276e8-14d3-4e67-956d-ad6212f45c69-kube-api-access-x6rl2\") pod \"controller-manager-879f6c89f-n6dwq\" (UID: \"14f276e8-14d3-4e67-956d-ad6212f45c69\") " pod="openshift-controller-manager/controller-manager-879f6c89f-n6dwq" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.570800 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4q8xw\" (UniqueName: \"kubernetes.io/projected/44eee7e7-6cdd-40bf-a998-ed2c4ae7884a-kube-api-access-4q8xw\") pod \"ingress-operator-5b745b69d9-p968m\" (UID: \"44eee7e7-6cdd-40bf-a998-ed2c4ae7884a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-p968m" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.570817 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w9g9k\" (UniqueName: \"kubernetes.io/projected/a4cc9117-6286-44ba-a590-893cff694a7c-kube-api-access-w9g9k\") pod \"oauth-openshift-558db77b4-cgckf\" (UID: \"a4cc9117-6286-44ba-a590-893cff694a7c\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-cgckf" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.570836 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a4cc9117-6286-44ba-a590-893cff694a7c-audit-dir\") pod \"oauth-openshift-558db77b4-cgckf\" (UID: \"a4cc9117-6286-44ba-a590-893cff694a7c\") " pod="openshift-authentication/oauth-openshift-558db77b4-cgckf" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.570851 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/d18be0a7-2aaa-4aaa-9d37-f44453e994d8-console-config\") pod \"console-f9d7485db-dc8cp\" (UID: \"d18be0a7-2aaa-4aaa-9d37-f44453e994d8\") " pod="openshift-console/console-f9d7485db-dc8cp" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.570866 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/44909f31-73c1-4282-a2f6-e93bc1bf6d32-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-rw7qg\" (UID: \"44909f31-73c1-4282-a2f6-e93bc1bf6d32\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-rw7qg" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.570883 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e2ab408-9ac3-4e29-aed5-42d77f94ad50-config\") pod \"authentication-operator-69f744f599-cbp5t\" (UID: \"7e2ab408-9ac3-4e29-aed5-42d77f94ad50\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-cbp5t" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.570896 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/d18be0a7-2aaa-4aaa-9d37-f44453e994d8-service-ca\") pod \"console-f9d7485db-dc8cp\" (UID: \"d18be0a7-2aaa-4aaa-9d37-f44453e994d8\") " pod="openshift-console/console-f9d7485db-dc8cp" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.570910 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dztlb\" (UniqueName: \"kubernetes.io/projected/2c2ceeb7-b3cc-4085-8591-0e0c42a01c34-kube-api-access-dztlb\") pod \"apiserver-7bbb656c7d-hfmnw\" (UID: \"2c2ceeb7-b3cc-4085-8591-0e0c42a01c34\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hfmnw" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.570926 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-cgckf\" (UID: \"a4cc9117-6286-44ba-a590-893cff694a7c\") " pod="openshift-authentication/oauth-openshift-558db77b4-cgckf" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.570943 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/26fa94f3-6f45-4641-a0b5-b027b7db54c3-trusted-ca-bundle\") pod \"apiserver-76f77b778f-9cvs5\" (UID: \"26fa94f3-6f45-4641-a0b5-b027b7db54c3\") " pod="openshift-apiserver/apiserver-76f77b778f-9cvs5" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.570960 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-cgckf\" (UID: \"a4cc9117-6286-44ba-a590-893cff694a7c\") " pod="openshift-authentication/oauth-openshift-558db77b4-cgckf" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.570976 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/14f276e8-14d3-4e67-956d-ad6212f45c69-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-n6dwq\" (UID: \"14f276e8-14d3-4e67-956d-ad6212f45c69\") " pod="openshift-controller-manager/controller-manager-879f6c89f-n6dwq" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.571004 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/26fa94f3-6f45-4641-a0b5-b027b7db54c3-image-import-ca\") pod \"apiserver-76f77b778f-9cvs5\" (UID: \"26fa94f3-6f45-4641-a0b5-b027b7db54c3\") " pod="openshift-apiserver/apiserver-76f77b778f-9cvs5" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.571022 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/99b2469b-932c-4a11-be99-ace72cff3d56-serving-cert\") pod \"console-operator-58897d9998-xhhrv\" (UID: \"99b2469b-932c-4a11-be99-ace72cff3d56\") " pod="openshift-console-operator/console-operator-58897d9998-xhhrv" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.571038 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jdpvq\" (UniqueName: \"kubernetes.io/projected/c7f23b6b-b7a1-46f4-b553-37be88f8fae0-kube-api-access-jdpvq\") pod \"route-controller-manager-6576b87f9c-zvj6k\" (UID: \"c7f23b6b-b7a1-46f4-b553-37be88f8fae0\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvj6k" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.571062 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/26fa94f3-6f45-4641-a0b5-b027b7db54c3-node-pullsecrets\") pod \"apiserver-76f77b778f-9cvs5\" (UID: \"26fa94f3-6f45-4641-a0b5-b027b7db54c3\") " pod="openshift-apiserver/apiserver-76f77b778f-9cvs5" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.571067 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a4cc9117-6286-44ba-a590-893cff694a7c-audit-policies\") pod \"oauth-openshift-558db77b4-cgckf\" (UID: \"a4cc9117-6286-44ba-a590-893cff694a7c\") " pod="openshift-authentication/oauth-openshift-558db77b4-cgckf" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.571077 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e3cf153e-0d08-489d-8d35-1170356831ee-serving-cert\") pod \"openshift-config-operator-7777fb866f-hvrvb\" (UID: \"e3cf153e-0d08-489d-8d35-1170356831ee\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-hvrvb" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.571090 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/2c2ceeb7-b3cc-4085-8591-0e0c42a01c34-audit-policies\") pod \"apiserver-7bbb656c7d-hfmnw\" (UID: 
\"2c2ceeb7-b3cc-4085-8591-0e0c42a01c34\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hfmnw" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.571114 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/ffff689f-0318-485f-9f8c-a8f2f86eebaf-node-bootstrap-token\") pod \"machine-config-server-nfh76\" (UID: \"ffff689f-0318-485f-9f8c-a8f2f86eebaf\") " pod="openshift-machine-config-operator/machine-config-server-nfh76" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.571133 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a4cc9117-6286-44ba-a590-893cff694a7c-audit-dir\") pod \"oauth-openshift-558db77b4-cgckf\" (UID: \"a4cc9117-6286-44ba-a590-893cff694a7c\") " pod="openshift-authentication/oauth-openshift-558db77b4-cgckf" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.571129 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/26fa94f3-6f45-4641-a0b5-b027b7db54c3-config\") pod \"apiserver-76f77b778f-9cvs5\" (UID: \"26fa94f3-6f45-4641-a0b5-b027b7db54c3\") " pod="openshift-apiserver/apiserver-76f77b778f-9cvs5" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.571177 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ngs6l\" (UniqueName: \"kubernetes.io/projected/1fc12aa7-09ac-4720-b32e-920301ca04ee-kube-api-access-ngs6l\") pod \"machine-config-operator-74547568cd-fdg9t\" (UID: \"1fc12aa7-09ac-4720-b32e-920301ca04ee\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fdg9t" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.571217 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wrbhv\" (UniqueName: \"kubernetes.io/projected/172795ed-fdc1-4567-ae54-f6980f7d2038-kube-api-access-wrbhv\") pod \"migrator-59844c95c7-vsdq4\" (UID: \"172795ed-fdc1-4567-ae54-f6980f7d2038\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-vsdq4" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.571233 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9cdpd\" (UniqueName: \"kubernetes.io/projected/ffff689f-0318-485f-9f8c-a8f2f86eebaf-kube-api-access-9cdpd\") pod \"machine-config-server-nfh76\" (UID: \"ffff689f-0318-485f-9f8c-a8f2f86eebaf\") " pod="openshift-machine-config-operator/machine-config-server-nfh76" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.571253 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l5vgk\" (UniqueName: \"kubernetes.io/projected/7e2ab408-9ac3-4e29-aed5-42d77f94ad50-kube-api-access-l5vgk\") pod \"authentication-operator-69f744f599-cbp5t\" (UID: \"7e2ab408-9ac3-4e29-aed5-42d77f94ad50\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-cbp5t" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.571268 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3885a18d-c5e3-484e-8950-878e57b1bb43-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-zwlfp\" (UID: \"3885a18d-c5e3-484e-8950-878e57b1bb43\") " 
pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zwlfp" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.571284 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-cgckf\" (UID: \"a4cc9117-6286-44ba-a590-893cff694a7c\") " pod="openshift-authentication/oauth-openshift-558db77b4-cgckf" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.571301 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a35ea846-6e87-40fe-8998-037fb7bd2060-config\") pod \"service-ca-operator-777779d784-8mrgz\" (UID: \"a35ea846-6e87-40fe-8998-037fb7bd2060\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-8mrgz" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.571316 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/905be650-0ed4-45ef-984c-00ab31ee178b-metrics-tls\") pod \"dns-operator-744455d44c-cb2kn\" (UID: \"905be650-0ed4-45ef-984c-00ab31ee178b\") " pod="openshift-dns-operator/dns-operator-744455d44c-cb2kn" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.571335 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fslg7\" (UniqueName: \"kubernetes.io/projected/14c24a3b-a966-47ff-a122-50abb423e4b3-kube-api-access-fslg7\") pod \"cluster-samples-operator-665b6dd947-mzhff\" (UID: \"14c24a3b-a966-47ff-a122-50abb423e4b3\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mzhff" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.571350 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c7f23b6b-b7a1-46f4-b553-37be88f8fae0-client-ca\") pod \"route-controller-manager-6576b87f9c-zvj6k\" (UID: \"c7f23b6b-b7a1-46f4-b553-37be88f8fae0\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvj6k" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.571366 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5jgl4\" (UniqueName: \"kubernetes.io/projected/1001a5ac-b44d-41c2-979f-a987c9c6d63f-kube-api-access-5jgl4\") pod \"machine-api-operator-5694c8668f-g29lh\" (UID: \"1001a5ac-b44d-41c2-979f-a987c9c6d63f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-g29lh" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.571381 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jwbdm\" (UniqueName: \"kubernetes.io/projected/05009b2f-f136-4f9d-8d58-169816bb1ea6-kube-api-access-jwbdm\") pod \"marketplace-operator-79b997595-24p95\" (UID: \"05009b2f-f136-4f9d-8d58-169816bb1ea6\") " pod="openshift-marketplace/marketplace-operator-79b997595-24p95" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.571398 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tb5nx\" (UniqueName: \"kubernetes.io/projected/44909f31-73c1-4282-a2f6-e93bc1bf6d32-kube-api-access-tb5nx\") pod \"multus-admission-controller-857f4d67dd-rw7qg\" (UID: \"44909f31-73c1-4282-a2f6-e93bc1bf6d32\") " 
pod="openshift-multus/multus-admission-controller-857f4d67dd-rw7qg" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.571412 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/44eee7e7-6cdd-40bf-a998-ed2c4ae7884a-metrics-tls\") pod \"ingress-operator-5b745b69d9-p968m\" (UID: \"44eee7e7-6cdd-40bf-a998-ed2c4ae7884a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-p968m" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.571427 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/b16defcf-1335-4210-a647-bcaef5e0642f-proxy-tls\") pod \"machine-config-controller-84d6567774-pnbrx\" (UID: \"b16defcf-1335-4210-a647-bcaef5e0642f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-pnbrx" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.571443 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/83732716-a7c1-4be4-a73a-1d7a9633af29-config\") pod \"etcd-operator-b45778765-75lmn\" (UID: \"83732716-a7c1-4be4-a73a-1d7a9633af29\") " pod="openshift-etcd-operator/etcd-operator-b45778765-75lmn" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.572125 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-cgckf\" (UID: \"a4cc9117-6286-44ba-a590-893cff694a7c\") " pod="openshift-authentication/oauth-openshift-558db77b4-cgckf" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.572736 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/26fa94f3-6f45-4641-a0b5-b027b7db54c3-encryption-config\") pod \"apiserver-76f77b778f-9cvs5\" (UID: \"26fa94f3-6f45-4641-a0b5-b027b7db54c3\") " pod="openshift-apiserver/apiserver-76f77b778f-9cvs5" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.572916 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/26fa94f3-6f45-4641-a0b5-b027b7db54c3-image-import-ca\") pod \"apiserver-76f77b778f-9cvs5\" (UID: \"26fa94f3-6f45-4641-a0b5-b027b7db54c3\") " pod="openshift-apiserver/apiserver-76f77b778f-9cvs5" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.573268 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/26fa94f3-6f45-4641-a0b5-b027b7db54c3-serving-cert\") pod \"apiserver-76f77b778f-9cvs5\" (UID: \"26fa94f3-6f45-4641-a0b5-b027b7db54c3\") " pod="openshift-apiserver/apiserver-76f77b778f-9cvs5" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.573391 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-cgckf\" (UID: \"a4cc9117-6286-44ba-a590-893cff694a7c\") " pod="openshift-authentication/oauth-openshift-558db77b4-cgckf" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.573512 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/26fa94f3-6f45-4641-a0b5-b027b7db54c3-config\") pod \"apiserver-76f77b778f-9cvs5\" (UID: \"26fa94f3-6f45-4641-a0b5-b027b7db54c3\") " pod="openshift-apiserver/apiserver-76f77b778f-9cvs5" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.573606 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/05009b2f-f136-4f9d-8d58-169816bb1ea6-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-24p95\" (UID: \"05009b2f-f136-4f9d-8d58-169816bb1ea6\") " pod="openshift-marketplace/marketplace-operator-79b997595-24p95" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.573656 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/26fa94f3-6f45-4641-a0b5-b027b7db54c3-etcd-client\") pod \"apiserver-76f77b778f-9cvs5\" (UID: \"26fa94f3-6f45-4641-a0b5-b027b7db54c3\") " pod="openshift-apiserver/apiserver-76f77b778f-9cvs5" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.573726 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7e2ab408-9ac3-4e29-aed5-42d77f94ad50-serving-cert\") pod \"authentication-operator-69f744f599-cbp5t\" (UID: \"7e2ab408-9ac3-4e29-aed5-42d77f94ad50\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-cbp5t" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.573762 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/2c2ceeb7-b3cc-4085-8591-0e0c42a01c34-encryption-config\") pod \"apiserver-7bbb656c7d-hfmnw\" (UID: \"2c2ceeb7-b3cc-4085-8591-0e0c42a01c34\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hfmnw" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.573800 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6psh6\" (UniqueName: \"kubernetes.io/projected/2bb618f2-adb1-40ff-a336-2520519233b7-kube-api-access-6psh6\") pod \"openshift-apiserver-operator-796bbdcf4f-x8ln7\" (UID: \"2bb618f2-adb1-40ff-a336-2520519233b7\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-x8ln7" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.573900 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e2ab408-9ac3-4e29-aed5-42d77f94ad50-config\") pod \"authentication-operator-69f744f599-cbp5t\" (UID: \"7e2ab408-9ac3-4e29-aed5-42d77f94ad50\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-cbp5t" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.574037 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-cgckf\" (UID: \"a4cc9117-6286-44ba-a590-893cff694a7c\") " pod="openshift-authentication/oauth-openshift-558db77b4-cgckf" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.574071 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/d87db89f-40fd-4e1c-a959-e167edef0c19-signing-key\") pod \"service-ca-9c57cc56f-bf8b4\" (UID: 
\"d87db89f-40fd-4e1c-a959-e167edef0c19\") " pod="openshift-service-ca/service-ca-9c57cc56f-bf8b4" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.574095 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/3a676c5e-7428-4f37-9aa2-0755611c6ff3-default-certificate\") pod \"router-default-5444994796-n9pwg\" (UID: \"3a676c5e-7428-4f37-9aa2-0755611c6ff3\") " pod="openshift-ingress/router-default-5444994796-n9pwg" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.574116 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/b278aa37-9fc1-430d-9182-2dbf0f56b930-auth-proxy-config\") pod \"machine-approver-56656f9798-26pzr\" (UID: \"b278aa37-9fc1-430d-9182-2dbf0f56b930\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-26pzr" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.574138 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/44eee7e7-6cdd-40bf-a998-ed2c4ae7884a-bound-sa-token\") pod \"ingress-operator-5b745b69d9-p968m\" (UID: \"44eee7e7-6cdd-40bf-a998-ed2c4ae7884a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-p968m" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.574172 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/507a4649-4e86-435b-8ceb-9587a90bc48c-apiservice-cert\") pod \"packageserver-d55dfcdfc-25wmw\" (UID: \"507a4649-4e86-435b-8ceb-9587a90bc48c\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-25wmw" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.574210 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b15591c3-541e-4daf-8e21-9b69db5cf260-secret-volume\") pod \"collect-profiles-29424135-v2t6r\" (UID: \"b15591c3-541e-4daf-8e21-9b69db5cf260\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424135-v2t6r" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.574213 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7e2ab408-9ac3-4e29-aed5-42d77f94ad50-service-ca-bundle\") pod \"authentication-operator-69f744f599-cbp5t\" (UID: \"7e2ab408-9ac3-4e29-aed5-42d77f94ad50\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-cbp5t" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.574233 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fc42c87c-f305-40f2-8d38-c9ef45c05bd0-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-nn574\" (UID: \"fc42c87c-f305-40f2-8d38-c9ef45c05bd0\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-nn574" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.574268 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lhpvj\" (UniqueName: \"kubernetes.io/projected/b278aa37-9fc1-430d-9182-2dbf0f56b930-kube-api-access-lhpvj\") pod \"machine-approver-56656f9798-26pzr\" (UID: \"b278aa37-9fc1-430d-9182-2dbf0f56b930\") " 
pod="openshift-cluster-machine-approver/machine-approver-56656f9798-26pzr" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.574290 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/10c2efee-1b70-4258-92f0-caba3a5cfd50-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-jnb74\" (UID: \"10c2efee-1b70-4258-92f0-caba3a5cfd50\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-jnb74" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.574302 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/b278aa37-9fc1-430d-9182-2dbf0f56b930-machine-approver-tls\") pod \"machine-approver-56656f9798-26pzr\" (UID: \"b278aa37-9fc1-430d-9182-2dbf0f56b930\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-26pzr" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.574312 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dcx62\" (UniqueName: \"kubernetes.io/projected/f64a0f1e-3264-4862-ad3b-097bf139855c-kube-api-access-dcx62\") pod \"olm-operator-6b444d44fb-zzlmb\" (UID: \"f64a0f1e-3264-4862-ad3b-097bf139855c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zzlmb" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.571638 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/26fa94f3-6f45-4641-a0b5-b027b7db54c3-audit\") pod \"apiserver-76f77b778f-9cvs5\" (UID: \"26fa94f3-6f45-4641-a0b5-b027b7db54c3\") " pod="openshift-apiserver/apiserver-76f77b778f-9cvs5" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.574769 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/b278aa37-9fc1-430d-9182-2dbf0f56b930-auth-proxy-config\") pod \"machine-approver-56656f9798-26pzr\" (UID: \"b278aa37-9fc1-430d-9182-2dbf0f56b930\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-26pzr" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.574911 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-cgckf\" (UID: \"a4cc9117-6286-44ba-a590-893cff694a7c\") " pod="openshift-authentication/oauth-openshift-558db77b4-cgckf" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.574947 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ktlbb\" (UniqueName: \"kubernetes.io/projected/e3cf153e-0d08-489d-8d35-1170356831ee-kube-api-access-ktlbb\") pod \"openshift-config-operator-7777fb866f-hvrvb\" (UID: \"e3cf153e-0d08-489d-8d35-1170356831ee\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-hvrvb" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.574971 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/14f276e8-14d3-4e67-956d-ad6212f45c69-serving-cert\") pod \"controller-manager-879f6c89f-n6dwq\" (UID: \"14f276e8-14d3-4e67-956d-ad6212f45c69\") " pod="openshift-controller-manager/controller-manager-879f6c89f-n6dwq" Dec 11 
10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.574992 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/2c2ceeb7-b3cc-4085-8591-0e0c42a01c34-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-hfmnw\" (UID: \"2c2ceeb7-b3cc-4085-8591-0e0c42a01c34\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hfmnw" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.575013 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2a76c011-1596-4893-b01f-abf8e6e5295c-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-9kh7f\" (UID: \"2a76c011-1596-4893-b01f-abf8e6e5295c\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9kh7f" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.575034 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9c2sn\" (UniqueName: \"kubernetes.io/projected/905be650-0ed4-45ef-984c-00ab31ee178b-kube-api-access-9c2sn\") pod \"dns-operator-744455d44c-cb2kn\" (UID: \"905be650-0ed4-45ef-984c-00ab31ee178b\") " pod="openshift-dns-operator/dns-operator-744455d44c-cb2kn" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.575058 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3a676c5e-7428-4f37-9aa2-0755611c6ff3-metrics-certs\") pod \"router-default-5444994796-n9pwg\" (UID: \"3a676c5e-7428-4f37-9aa2-0755611c6ff3\") " pod="openshift-ingress/router-default-5444994796-n9pwg" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.575281 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/14f276e8-14d3-4e67-956d-ad6212f45c69-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-n6dwq\" (UID: \"14f276e8-14d3-4e67-956d-ad6212f45c69\") " pod="openshift-controller-manager/controller-manager-879f6c89f-n6dwq" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.573725 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/14f276e8-14d3-4e67-956d-ad6212f45c69-client-ca\") pod \"controller-manager-879f6c89f-n6dwq\" (UID: \"14f276e8-14d3-4e67-956d-ad6212f45c69\") " pod="openshift-controller-manager/controller-manager-879f6c89f-n6dwq" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.575298 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/26fa94f3-6f45-4641-a0b5-b027b7db54c3-etcd-serving-ca\") pod \"apiserver-76f77b778f-9cvs5\" (UID: \"26fa94f3-6f45-4641-a0b5-b027b7db54c3\") " pod="openshift-apiserver/apiserver-76f77b778f-9cvs5" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.575394 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/ffff689f-0318-485f-9f8c-a8f2f86eebaf-certs\") pod \"machine-config-server-nfh76\" (UID: \"ffff689f-0318-485f-9f8c-a8f2f86eebaf\") " pod="openshift-machine-config-operator/machine-config-server-nfh76" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.575418 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: 
\"kubernetes.io/secret/2c2ceeb7-b3cc-4085-8591-0e0c42a01c34-etcd-client\") pod \"apiserver-7bbb656c7d-hfmnw\" (UID: \"2c2ceeb7-b3cc-4085-8591-0e0c42a01c34\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hfmnw" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.575423 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3a676c5e-7428-4f37-9aa2-0755611c6ff3-service-ca-bundle\") pod \"router-default-5444994796-n9pwg\" (UID: \"3a676c5e-7428-4f37-9aa2-0755611c6ff3\") " pod="openshift-ingress/router-default-5444994796-n9pwg" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.575479 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c7f23b6b-b7a1-46f4-b553-37be88f8fae0-config\") pod \"route-controller-manager-6576b87f9c-zvj6k\" (UID: \"c7f23b6b-b7a1-46f4-b553-37be88f8fae0\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvj6k" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.575505 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/1001a5ac-b44d-41c2-979f-a987c9c6d63f-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-g29lh\" (UID: \"1001a5ac-b44d-41c2-979f-a987c9c6d63f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-g29lh" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.575528 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/14c24a3b-a966-47ff-a122-50abb423e4b3-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-mzhff\" (UID: \"14c24a3b-a966-47ff-a122-50abb423e4b3\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mzhff" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.575579 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pfxmj\" (UniqueName: \"kubernetes.io/projected/b16defcf-1335-4210-a647-bcaef5e0642f-kube-api-access-pfxmj\") pod \"machine-config-controller-84d6567774-pnbrx\" (UID: \"b16defcf-1335-4210-a647-bcaef5e0642f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-pnbrx" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.575606 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/99b2469b-932c-4a11-be99-ace72cff3d56-config\") pod \"console-operator-58897d9998-xhhrv\" (UID: \"99b2469b-932c-4a11-be99-ace72cff3d56\") " pod="openshift-console-operator/console-operator-58897d9998-xhhrv" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.575632 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jnlld\" (UniqueName: \"kubernetes.io/projected/46f7ebf5-734d-41f6-a8eb-1daf1dc0e014-kube-api-access-jnlld\") pod \"downloads-7954f5f757-mn2pm\" (UID: \"46f7ebf5-734d-41f6-a8eb-1daf1dc0e014\") " pod="openshift-console/downloads-7954f5f757-mn2pm" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.575655 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2c2ceeb7-b3cc-4085-8591-0e0c42a01c34-serving-cert\") pod \"apiserver-7bbb656c7d-hfmnw\" (UID: 
\"2c2ceeb7-b3cc-4085-8591-0e0c42a01c34\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hfmnw" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.575674 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/507a4649-4e86-435b-8ceb-9587a90bc48c-webhook-cert\") pod \"packageserver-d55dfcdfc-25wmw\" (UID: \"507a4649-4e86-435b-8ceb-9587a90bc48c\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-25wmw" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.575705 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-cgckf\" (UID: \"a4cc9117-6286-44ba-a590-893cff694a7c\") " pod="openshift-authentication/oauth-openshift-558db77b4-cgckf" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.576298 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/26fa94f3-6f45-4641-a0b5-b027b7db54c3-node-pullsecrets\") pod \"apiserver-76f77b778f-9cvs5\" (UID: \"26fa94f3-6f45-4641-a0b5-b027b7db54c3\") " pod="openshift-apiserver/apiserver-76f77b778f-9cvs5" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.576325 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-cgckf\" (UID: \"a4cc9117-6286-44ba-a590-893cff694a7c\") " pod="openshift-authentication/oauth-openshift-558db77b4-cgckf" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.576756 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/26fa94f3-6f45-4641-a0b5-b027b7db54c3-trusted-ca-bundle\") pod \"apiserver-76f77b778f-9cvs5\" (UID: \"26fa94f3-6f45-4641-a0b5-b027b7db54c3\") " pod="openshift-apiserver/apiserver-76f77b778f-9cvs5" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.576906 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/2c2ceeb7-b3cc-4085-8591-0e0c42a01c34-audit-policies\") pod \"apiserver-7bbb656c7d-hfmnw\" (UID: \"2c2ceeb7-b3cc-4085-8591-0e0c42a01c34\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hfmnw" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.577594 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-cgckf\" (UID: \"a4cc9117-6286-44ba-a590-893cff694a7c\") " pod="openshift-authentication/oauth-openshift-558db77b4-cgckf" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.577680 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/2c2ceeb7-b3cc-4085-8591-0e0c42a01c34-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-hfmnw\" (UID: \"2c2ceeb7-b3cc-4085-8591-0e0c42a01c34\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hfmnw" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.577867 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"encryption-config\" (UniqueName: \"kubernetes.io/secret/2c2ceeb7-b3cc-4085-8591-0e0c42a01c34-encryption-config\") pod \"apiserver-7bbb656c7d-hfmnw\" (UID: \"2c2ceeb7-b3cc-4085-8591-0e0c42a01c34\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hfmnw" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.578347 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-cgckf\" (UID: \"a4cc9117-6286-44ba-a590-893cff694a7c\") " pod="openshift-authentication/oauth-openshift-558db77b4-cgckf" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.578454 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-cgckf\" (UID: \"a4cc9117-6286-44ba-a590-893cff694a7c\") " pod="openshift-authentication/oauth-openshift-558db77b4-cgckf" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.578567 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/99b2469b-932c-4a11-be99-ace72cff3d56-config\") pod \"console-operator-58897d9998-xhhrv\" (UID: \"99b2469b-932c-4a11-be99-ace72cff3d56\") " pod="openshift-console-operator/console-operator-58897d9998-xhhrv" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.578844 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-cgckf\" (UID: \"a4cc9117-6286-44ba-a590-893cff694a7c\") " pod="openshift-authentication/oauth-openshift-558db77b4-cgckf" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.579514 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/1001a5ac-b44d-41c2-979f-a987c9c6d63f-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-g29lh\" (UID: \"1001a5ac-b44d-41c2-979f-a987c9c6d63f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-g29lh" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.579627 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-cgckf\" (UID: \"a4cc9117-6286-44ba-a590-893cff694a7c\") " pod="openshift-authentication/oauth-openshift-558db77b4-cgckf" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.579934 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/14c24a3b-a966-47ff-a122-50abb423e4b3-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-mzhff\" (UID: \"14c24a3b-a966-47ff-a122-50abb423e4b3\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mzhff" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.580701 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-system-session\") pod 
\"oauth-openshift-558db77b4-cgckf\" (UID: \"a4cc9117-6286-44ba-a590-893cff694a7c\") " pod="openshift-authentication/oauth-openshift-558db77b4-cgckf" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.582803 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/26fa94f3-6f45-4641-a0b5-b027b7db54c3-etcd-client\") pod \"apiserver-76f77b778f-9cvs5\" (UID: \"26fa94f3-6f45-4641-a0b5-b027b7db54c3\") " pod="openshift-apiserver/apiserver-76f77b778f-9cvs5" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.583027 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-cgckf\" (UID: \"a4cc9117-6286-44ba-a590-893cff694a7c\") " pod="openshift-authentication/oauth-openshift-558db77b4-cgckf" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.583231 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e3cf153e-0d08-489d-8d35-1170356831ee-serving-cert\") pod \"openshift-config-operator-7777fb866f-hvrvb\" (UID: \"e3cf153e-0d08-489d-8d35-1170356831ee\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-hvrvb" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.583351 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-cgckf\" (UID: \"a4cc9117-6286-44ba-a590-893cff694a7c\") " pod="openshift-authentication/oauth-openshift-558db77b4-cgckf" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.587330 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/14f276e8-14d3-4e67-956d-ad6212f45c69-serving-cert\") pod \"controller-manager-879f6c89f-n6dwq\" (UID: \"14f276e8-14d3-4e67-956d-ad6212f45c69\") " pod="openshift-controller-manager/controller-manager-879f6c89f-n6dwq" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.587638 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-cgckf\" (UID: \"a4cc9117-6286-44ba-a590-893cff694a7c\") " pod="openshift-authentication/oauth-openshift-558db77b4-cgckf" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.587775 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/99b2469b-932c-4a11-be99-ace72cff3d56-serving-cert\") pod \"console-operator-58897d9998-xhhrv\" (UID: \"99b2469b-932c-4a11-be99-ace72cff3d56\") " pod="openshift-console-operator/console-operator-58897d9998-xhhrv" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.588010 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7e2ab408-9ac3-4e29-aed5-42d77f94ad50-serving-cert\") pod \"authentication-operator-69f744f599-cbp5t\" (UID: \"7e2ab408-9ac3-4e29-aed5-42d77f94ad50\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-cbp5t" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 
10:20:02.588074 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2c2ceeb7-b3cc-4085-8591-0e0c42a01c34-serving-cert\") pod \"apiserver-7bbb656c7d-hfmnw\" (UID: \"2c2ceeb7-b3cc-4085-8591-0e0c42a01c34\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hfmnw" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.588084 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.606968 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.626537 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.646294 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.665892 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.681594 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/507a4649-4e86-435b-8ceb-9587a90bc48c-webhook-cert\") pod \"packageserver-d55dfcdfc-25wmw\" (UID: \"507a4649-4e86-435b-8ceb-9587a90bc48c\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-25wmw" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.681634 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/9813fa82-894c-4380-8975-24771adf79e7-profile-collector-cert\") pod \"catalog-operator-68c6474976-zxbz5\" (UID: \"9813fa82-894c-4380-8975-24771adf79e7\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zxbz5" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.681655 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/d18be0a7-2aaa-4aaa-9d37-f44453e994d8-console-oauth-config\") pod \"console-f9d7485db-dc8cp\" (UID: \"d18be0a7-2aaa-4aaa-9d37-f44453e994d8\") " pod="openshift-console/console-f9d7485db-dc8cp" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.681676 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5ggzg\" (UniqueName: \"kubernetes.io/projected/507a4649-4e86-435b-8ceb-9587a90bc48c-kube-api-access-5ggzg\") pod \"packageserver-d55dfcdfc-25wmw\" (UID: \"507a4649-4e86-435b-8ceb-9587a90bc48c\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-25wmw" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.681697 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2bb618f2-adb1-40ff-a336-2520519233b7-config\") pod \"openshift-apiserver-operator-796bbdcf4f-x8ln7\" (UID: \"2bb618f2-adb1-40ff-a336-2520519233b7\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-x8ln7" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.681720 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/05009b2f-f136-4f9d-8d58-169816bb1ea6-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-24p95\" (UID: \"05009b2f-f136-4f9d-8d58-169816bb1ea6\") " pod="openshift-marketplace/marketplace-operator-79b997595-24p95" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.681741 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/1fc12aa7-09ac-4720-b32e-920301ca04ee-images\") pod \"machine-config-operator-74547568cd-fdg9t\" (UID: \"1fc12aa7-09ac-4720-b32e-920301ca04ee\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fdg9t" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.681762 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d18be0a7-2aaa-4aaa-9d37-f44453e994d8-trusted-ca-bundle\") pod \"console-f9d7485db-dc8cp\" (UID: \"d18be0a7-2aaa-4aaa-9d37-f44453e994d8\") " pod="openshift-console/console-f9d7485db-dc8cp" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.681786 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fc42c87c-f305-40f2-8d38-c9ef45c05bd0-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-nn574\" (UID: \"fc42c87c-f305-40f2-8d38-c9ef45c05bd0\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-nn574" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.681814 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3885a18d-c5e3-484e-8950-878e57b1bb43-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-zwlfp\" (UID: \"3885a18d-c5e3-484e-8950-878e57b1bb43\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zwlfp" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.681837 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c2txq\" (UniqueName: \"kubernetes.io/projected/3885a18d-c5e3-484e-8950-878e57b1bb43-kube-api-access-c2txq\") pod \"kube-storage-version-migrator-operator-b67b599dd-zwlfp\" (UID: \"3885a18d-c5e3-484e-8950-878e57b1bb43\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zwlfp" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.681859 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2a76c011-1596-4893-b01f-abf8e6e5295c-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-9kh7f\" (UID: \"2a76c011-1596-4893-b01f-abf8e6e5295c\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9kh7f" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.681886 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/83732716-a7c1-4be4-a73a-1d7a9633af29-etcd-client\") pod \"etcd-operator-b45778765-75lmn\" (UID: \"83732716-a7c1-4be4-a73a-1d7a9633af29\") " pod="openshift-etcd-operator/etcd-operator-b45778765-75lmn" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.681911 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/58945b3f-8314-4251-a1e1-20dff39bf316-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-kw96j\" (UID: \"58945b3f-8314-4251-a1e1-20dff39bf316\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-kw96j" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.681934 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-746jm\" (UniqueName: \"kubernetes.io/projected/d87db89f-40fd-4e1c-a959-e167edef0c19-kube-api-access-746jm\") pod \"service-ca-9c57cc56f-bf8b4\" (UID: \"d87db89f-40fd-4e1c-a959-e167edef0c19\") " pod="openshift-service-ca/service-ca-9c57cc56f-bf8b4" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.681956 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc42c87c-f305-40f2-8d38-c9ef45c05bd0-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-nn574\" (UID: \"fc42c87c-f305-40f2-8d38-c9ef45c05bd0\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-nn574" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.681976 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xb77h\" (UniqueName: \"kubernetes.io/projected/3a676c5e-7428-4f37-9aa2-0755611c6ff3-kube-api-access-xb77h\") pod \"router-default-5444994796-n9pwg\" (UID: \"3a676c5e-7428-4f37-9aa2-0755611c6ff3\") " pod="openshift-ingress/router-default-5444994796-n9pwg" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.682010 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/b16defcf-1335-4210-a647-bcaef5e0642f-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-pnbrx\" (UID: \"b16defcf-1335-4210-a647-bcaef5e0642f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-pnbrx" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.682032 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/3a676c5e-7428-4f37-9aa2-0755611c6ff3-stats-auth\") pod \"router-default-5444994796-n9pwg\" (UID: \"3a676c5e-7428-4f37-9aa2-0755611c6ff3\") " pod="openshift-ingress/router-default-5444994796-n9pwg" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.682052 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/d18be0a7-2aaa-4aaa-9d37-f44453e994d8-console-serving-cert\") pod \"console-f9d7485db-dc8cp\" (UID: \"d18be0a7-2aaa-4aaa-9d37-f44453e994d8\") " pod="openshift-console/console-f9d7485db-dc8cp" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.682074 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ght7c\" (UniqueName: \"kubernetes.io/projected/32a7449e-b310-406a-b03a-4e2e461d11ae-kube-api-access-ght7c\") pod \"control-plane-machine-set-operator-78cbb6b69f-fwjxr\" (UID: \"32a7449e-b310-406a-b03a-4e2e461d11ae\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fwjxr" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.682095 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6pvvg\" (UniqueName: \"kubernetes.io/projected/9813fa82-894c-4380-8975-24771adf79e7-kube-api-access-6pvvg\") pod \"catalog-operator-68c6474976-zxbz5\" (UID: 
\"9813fa82-894c-4380-8975-24771adf79e7\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zxbz5" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.682117 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/9813fa82-894c-4380-8975-24771adf79e7-srv-cert\") pod \"catalog-operator-68c6474976-zxbz5\" (UID: \"9813fa82-894c-4380-8975-24771adf79e7\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zxbz5" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.682145 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/d18be0a7-2aaa-4aaa-9d37-f44453e994d8-oauth-serving-cert\") pod \"console-f9d7485db-dc8cp\" (UID: \"d18be0a7-2aaa-4aaa-9d37-f44453e994d8\") " pod="openshift-console/console-f9d7485db-dc8cp" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.682200 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f64a0f1e-3264-4862-ad3b-097bf139855c-srv-cert\") pod \"olm-operator-6b444d44fb-zzlmb\" (UID: \"f64a0f1e-3264-4862-ad3b-097bf139855c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zzlmb" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.682219 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/83732716-a7c1-4be4-a73a-1d7a9633af29-etcd-ca\") pod \"etcd-operator-b45778765-75lmn\" (UID: \"83732716-a7c1-4be4-a73a-1d7a9633af29\") " pod="openshift-etcd-operator/etcd-operator-b45778765-75lmn" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.682241 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dwbrw\" (UniqueName: \"kubernetes.io/projected/b15591c3-541e-4daf-8e21-9b69db5cf260-kube-api-access-dwbrw\") pod \"collect-profiles-29424135-v2t6r\" (UID: \"b15591c3-541e-4daf-8e21-9b69db5cf260\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424135-v2t6r" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.682260 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/d87db89f-40fd-4e1c-a959-e167edef0c19-signing-cabundle\") pod \"service-ca-9c57cc56f-bf8b4\" (UID: \"d87db89f-40fd-4e1c-a959-e167edef0c19\") " pod="openshift-service-ca/service-ca-9c57cc56f-bf8b4" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.682280 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a35ea846-6e87-40fe-8998-037fb7bd2060-serving-cert\") pod \"service-ca-operator-777779d784-8mrgz\" (UID: \"a35ea846-6e87-40fe-8998-037fb7bd2060\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-8mrgz" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.682301 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-st982\" (UniqueName: \"kubernetes.io/projected/83732716-a7c1-4be4-a73a-1d7a9633af29-kube-api-access-st982\") pod \"etcd-operator-b45778765-75lmn\" (UID: \"83732716-a7c1-4be4-a73a-1d7a9633af29\") " pod="openshift-etcd-operator/etcd-operator-b45778765-75lmn" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.682323 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-q4fd2\" (UniqueName: \"kubernetes.io/projected/58945b3f-8314-4251-a1e1-20dff39bf316-kube-api-access-q4fd2\") pod \"cluster-image-registry-operator-dc59b4c8b-kw96j\" (UID: \"58945b3f-8314-4251-a1e1-20dff39bf316\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-kw96j" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.682340 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2bb618f2-adb1-40ff-a336-2520519233b7-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-x8ln7\" (UID: \"2bb618f2-adb1-40ff-a336-2520519233b7\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-x8ln7" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.682359 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bhrrz\" (UniqueName: \"kubernetes.io/projected/10c2efee-1b70-4258-92f0-caba3a5cfd50-kube-api-access-bhrrz\") pod \"package-server-manager-789f6589d5-jnb74\" (UID: \"10c2efee-1b70-4258-92f0-caba3a5cfd50\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-jnb74" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.682378 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1fc12aa7-09ac-4720-b32e-920301ca04ee-auth-proxy-config\") pod \"machine-config-operator-74547568cd-fdg9t\" (UID: \"1fc12aa7-09ac-4720-b32e-920301ca04ee\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fdg9t" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.682387 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2bb618f2-adb1-40ff-a336-2520519233b7-config\") pod \"openshift-apiserver-operator-796bbdcf4f-x8ln7\" (UID: \"2bb618f2-adb1-40ff-a336-2520519233b7\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-x8ln7" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.682409 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f64a0f1e-3264-4862-ad3b-097bf139855c-profile-collector-cert\") pod \"olm-operator-6b444d44fb-zzlmb\" (UID: \"f64a0f1e-3264-4862-ad3b-097bf139855c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zzlmb" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.682800 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d18be0a7-2aaa-4aaa-9d37-f44453e994d8-trusted-ca-bundle\") pod \"console-f9d7485db-dc8cp\" (UID: \"d18be0a7-2aaa-4aaa-9d37-f44453e994d8\") " pod="openshift-console/console-f9d7485db-dc8cp" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.682977 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/58945b3f-8314-4251-a1e1-20dff39bf316-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-kw96j\" (UID: \"58945b3f-8314-4251-a1e1-20dff39bf316\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-kw96j" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.683091 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/58945b3f-8314-4251-a1e1-20dff39bf316-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-kw96j\" (UID: \"58945b3f-8314-4251-a1e1-20dff39bf316\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-kw96j" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.683220 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/83732716-a7c1-4be4-a73a-1d7a9633af29-serving-cert\") pod \"etcd-operator-b45778765-75lmn\" (UID: \"83732716-a7c1-4be4-a73a-1d7a9633af29\") " pod="openshift-etcd-operator/etcd-operator-b45778765-75lmn" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.683309 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2a76c011-1596-4893-b01f-abf8e6e5295c-config\") pod \"kube-apiserver-operator-766d6c64bb-9kh7f\" (UID: \"2a76c011-1596-4893-b01f-abf8e6e5295c\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9kh7f" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.683393 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-js874\" (UniqueName: \"kubernetes.io/projected/a35ea846-6e87-40fe-8998-037fb7bd2060-kube-api-access-js874\") pod \"service-ca-operator-777779d784-8mrgz\" (UID: \"a35ea846-6e87-40fe-8998-037fb7bd2060\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-8mrgz" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.684268 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/83732716-a7c1-4be4-a73a-1d7a9633af29-etcd-ca\") pod \"etcd-operator-b45778765-75lmn\" (UID: \"83732716-a7c1-4be4-a73a-1d7a9633af29\") " pod="openshift-etcd-operator/etcd-operator-b45778765-75lmn" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.684329 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/44eee7e7-6cdd-40bf-a998-ed2c4ae7884a-trusted-ca\") pod \"ingress-operator-5b745b69d9-p968m\" (UID: \"44eee7e7-6cdd-40bf-a998-ed2c4ae7884a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-p968m" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.684360 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/1fc12aa7-09ac-4720-b32e-920301ca04ee-proxy-tls\") pod \"machine-config-operator-74547568cd-fdg9t\" (UID: \"1fc12aa7-09ac-4720-b32e-920301ca04ee\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fdg9t" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.684388 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/58945b3f-8314-4251-a1e1-20dff39bf316-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-kw96j\" (UID: \"58945b3f-8314-4251-a1e1-20dff39bf316\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-kw96j" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.684413 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/83732716-a7c1-4be4-a73a-1d7a9633af29-etcd-service-ca\") pod \"etcd-operator-b45778765-75lmn\" (UID: \"83732716-a7c1-4be4-a73a-1d7a9633af29\") " 
pod="openshift-etcd-operator/etcd-operator-b45778765-75lmn" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.684438 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/507a4649-4e86-435b-8ceb-9587a90bc48c-tmpfs\") pod \"packageserver-d55dfcdfc-25wmw\" (UID: \"507a4649-4e86-435b-8ceb-9587a90bc48c\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-25wmw" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.684466 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b15591c3-541e-4daf-8e21-9b69db5cf260-config-volume\") pod \"collect-profiles-29424135-v2t6r\" (UID: \"b15591c3-541e-4daf-8e21-9b69db5cf260\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424135-v2t6r" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.684499 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-skwwp\" (UniqueName: \"kubernetes.io/projected/d18be0a7-2aaa-4aaa-9d37-f44453e994d8-kube-api-access-skwwp\") pod \"console-f9d7485db-dc8cp\" (UID: \"d18be0a7-2aaa-4aaa-9d37-f44453e994d8\") " pod="openshift-console/console-f9d7485db-dc8cp" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.684520 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/32a7449e-b310-406a-b03a-4e2e461d11ae-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-fwjxr\" (UID: \"32a7449e-b310-406a-b03a-4e2e461d11ae\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fwjxr" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.684552 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4q8xw\" (UniqueName: \"kubernetes.io/projected/44eee7e7-6cdd-40bf-a998-ed2c4ae7884a-kube-api-access-4q8xw\") pod \"ingress-operator-5b745b69d9-p968m\" (UID: \"44eee7e7-6cdd-40bf-a998-ed2c4ae7884a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-p968m" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.684582 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/d18be0a7-2aaa-4aaa-9d37-f44453e994d8-console-config\") pod \"console-f9d7485db-dc8cp\" (UID: \"d18be0a7-2aaa-4aaa-9d37-f44453e994d8\") " pod="openshift-console/console-f9d7485db-dc8cp" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.684601 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/b16defcf-1335-4210-a647-bcaef5e0642f-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-pnbrx\" (UID: \"b16defcf-1335-4210-a647-bcaef5e0642f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-pnbrx" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.684608 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/44909f31-73c1-4282-a2f6-e93bc1bf6d32-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-rw7qg\" (UID: \"44909f31-73c1-4282-a2f6-e93bc1bf6d32\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-rw7qg" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.684634 4663 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/d18be0a7-2aaa-4aaa-9d37-f44453e994d8-service-ca\") pod \"console-f9d7485db-dc8cp\" (UID: \"d18be0a7-2aaa-4aaa-9d37-f44453e994d8\") " pod="openshift-console/console-f9d7485db-dc8cp" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.684712 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ngs6l\" (UniqueName: \"kubernetes.io/projected/1fc12aa7-09ac-4720-b32e-920301ca04ee-kube-api-access-ngs6l\") pod \"machine-config-operator-74547568cd-fdg9t\" (UID: \"1fc12aa7-09ac-4720-b32e-920301ca04ee\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fdg9t" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.684733 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/ffff689f-0318-485f-9f8c-a8f2f86eebaf-node-bootstrap-token\") pod \"machine-config-server-nfh76\" (UID: \"ffff689f-0318-485f-9f8c-a8f2f86eebaf\") " pod="openshift-machine-config-operator/machine-config-server-nfh76" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.684756 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wrbhv\" (UniqueName: \"kubernetes.io/projected/172795ed-fdc1-4567-ae54-f6980f7d2038-kube-api-access-wrbhv\") pod \"migrator-59844c95c7-vsdq4\" (UID: \"172795ed-fdc1-4567-ae54-f6980f7d2038\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-vsdq4" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.684776 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9cdpd\" (UniqueName: \"kubernetes.io/projected/ffff689f-0318-485f-9f8c-a8f2f86eebaf-kube-api-access-9cdpd\") pod \"machine-config-server-nfh76\" (UID: \"ffff689f-0318-485f-9f8c-a8f2f86eebaf\") " pod="openshift-machine-config-operator/machine-config-server-nfh76" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.684812 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3885a18d-c5e3-484e-8950-878e57b1bb43-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-zwlfp\" (UID: \"3885a18d-c5e3-484e-8950-878e57b1bb43\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zwlfp" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.684836 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a35ea846-6e87-40fe-8998-037fb7bd2060-config\") pod \"service-ca-operator-777779d784-8mrgz\" (UID: \"a35ea846-6e87-40fe-8998-037fb7bd2060\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-8mrgz" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.684853 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/905be650-0ed4-45ef-984c-00ab31ee178b-metrics-tls\") pod \"dns-operator-744455d44c-cb2kn\" (UID: \"905be650-0ed4-45ef-984c-00ab31ee178b\") " pod="openshift-dns-operator/dns-operator-744455d44c-cb2kn" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.684886 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jwbdm\" (UniqueName: \"kubernetes.io/projected/05009b2f-f136-4f9d-8d58-169816bb1ea6-kube-api-access-jwbdm\") pod 
\"marketplace-operator-79b997595-24p95\" (UID: \"05009b2f-f136-4f9d-8d58-169816bb1ea6\") " pod="openshift-marketplace/marketplace-operator-79b997595-24p95" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.684917 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/44eee7e7-6cdd-40bf-a998-ed2c4ae7884a-metrics-tls\") pod \"ingress-operator-5b745b69d9-p968m\" (UID: \"44eee7e7-6cdd-40bf-a998-ed2c4ae7884a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-p968m" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.684936 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/b16defcf-1335-4210-a647-bcaef5e0642f-proxy-tls\") pod \"machine-config-controller-84d6567774-pnbrx\" (UID: \"b16defcf-1335-4210-a647-bcaef5e0642f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-pnbrx" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.684957 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tb5nx\" (UniqueName: \"kubernetes.io/projected/44909f31-73c1-4282-a2f6-e93bc1bf6d32-kube-api-access-tb5nx\") pod \"multus-admission-controller-857f4d67dd-rw7qg\" (UID: \"44909f31-73c1-4282-a2f6-e93bc1bf6d32\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-rw7qg" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.684980 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/83732716-a7c1-4be4-a73a-1d7a9633af29-config\") pod \"etcd-operator-b45778765-75lmn\" (UID: \"83732716-a7c1-4be4-a73a-1d7a9633af29\") " pod="openshift-etcd-operator/etcd-operator-b45778765-75lmn" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.684999 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/05009b2f-f136-4f9d-8d58-169816bb1ea6-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-24p95\" (UID: \"05009b2f-f136-4f9d-8d58-169816bb1ea6\") " pod="openshift-marketplace/marketplace-operator-79b997595-24p95" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.685021 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6psh6\" (UniqueName: \"kubernetes.io/projected/2bb618f2-adb1-40ff-a336-2520519233b7-kube-api-access-6psh6\") pod \"openshift-apiserver-operator-796bbdcf4f-x8ln7\" (UID: \"2bb618f2-adb1-40ff-a336-2520519233b7\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-x8ln7" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.685040 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/3a676c5e-7428-4f37-9aa2-0755611c6ff3-default-certificate\") pod \"router-default-5444994796-n9pwg\" (UID: \"3a676c5e-7428-4f37-9aa2-0755611c6ff3\") " pod="openshift-ingress/router-default-5444994796-n9pwg" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.685059 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/44eee7e7-6cdd-40bf-a998-ed2c4ae7884a-bound-sa-token\") pod \"ingress-operator-5b745b69d9-p968m\" (UID: \"44eee7e7-6cdd-40bf-a998-ed2c4ae7884a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-p968m" Dec 11 
10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.685080 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/507a4649-4e86-435b-8ceb-9587a90bc48c-apiservice-cert\") pod \"packageserver-d55dfcdfc-25wmw\" (UID: \"507a4649-4e86-435b-8ceb-9587a90bc48c\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-25wmw" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.685102 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/d87db89f-40fd-4e1c-a959-e167edef0c19-signing-key\") pod \"service-ca-9c57cc56f-bf8b4\" (UID: \"d87db89f-40fd-4e1c-a959-e167edef0c19\") " pod="openshift-service-ca/service-ca-9c57cc56f-bf8b4" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.685129 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fc42c87c-f305-40f2-8d38-c9ef45c05bd0-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-nn574\" (UID: \"fc42c87c-f305-40f2-8d38-c9ef45c05bd0\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-nn574" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.685212 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/10c2efee-1b70-4258-92f0-caba3a5cfd50-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-jnb74\" (UID: \"10c2efee-1b70-4258-92f0-caba3a5cfd50\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-jnb74" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.685242 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b15591c3-541e-4daf-8e21-9b69db5cf260-secret-volume\") pod \"collect-profiles-29424135-v2t6r\" (UID: \"b15591c3-541e-4daf-8e21-9b69db5cf260\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424135-v2t6r" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.685265 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dcx62\" (UniqueName: \"kubernetes.io/projected/f64a0f1e-3264-4862-ad3b-097bf139855c-kube-api-access-dcx62\") pod \"olm-operator-6b444d44fb-zzlmb\" (UID: \"f64a0f1e-3264-4862-ad3b-097bf139855c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zzlmb" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.685296 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2a76c011-1596-4893-b01f-abf8e6e5295c-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-9kh7f\" (UID: \"2a76c011-1596-4893-b01f-abf8e6e5295c\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9kh7f" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.685316 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9c2sn\" (UniqueName: \"kubernetes.io/projected/905be650-0ed4-45ef-984c-00ab31ee178b-kube-api-access-9c2sn\") pod \"dns-operator-744455d44c-cb2kn\" (UID: \"905be650-0ed4-45ef-984c-00ab31ee178b\") " pod="openshift-dns-operator/dns-operator-744455d44c-cb2kn" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.685339 4663 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3a676c5e-7428-4f37-9aa2-0755611c6ff3-metrics-certs\") pod \"router-default-5444994796-n9pwg\" (UID: \"3a676c5e-7428-4f37-9aa2-0755611c6ff3\") " pod="openshift-ingress/router-default-5444994796-n9pwg" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.685357 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3a676c5e-7428-4f37-9aa2-0755611c6ff3-service-ca-bundle\") pod \"router-default-5444994796-n9pwg\" (UID: \"3a676c5e-7428-4f37-9aa2-0755611c6ff3\") " pod="openshift-ingress/router-default-5444994796-n9pwg" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.685387 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/ffff689f-0318-485f-9f8c-a8f2f86eebaf-certs\") pod \"machine-config-server-nfh76\" (UID: \"ffff689f-0318-485f-9f8c-a8f2f86eebaf\") " pod="openshift-machine-config-operator/machine-config-server-nfh76" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.685413 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pfxmj\" (UniqueName: \"kubernetes.io/projected/b16defcf-1335-4210-a647-bcaef5e0642f-kube-api-access-pfxmj\") pod \"machine-config-controller-84d6567774-pnbrx\" (UID: \"b16defcf-1335-4210-a647-bcaef5e0642f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-pnbrx" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.685496 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/d18be0a7-2aaa-4aaa-9d37-f44453e994d8-console-config\") pod \"console-f9d7485db-dc8cp\" (UID: \"d18be0a7-2aaa-4aaa-9d37-f44453e994d8\") " pod="openshift-console/console-f9d7485db-dc8cp" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.685058 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/83732716-a7c1-4be4-a73a-1d7a9633af29-etcd-service-ca\") pod \"etcd-operator-b45778765-75lmn\" (UID: \"83732716-a7c1-4be4-a73a-1d7a9633af29\") " pod="openshift-etcd-operator/etcd-operator-b45778765-75lmn" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.685712 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/d18be0a7-2aaa-4aaa-9d37-f44453e994d8-service-ca\") pod \"console-f9d7485db-dc8cp\" (UID: \"d18be0a7-2aaa-4aaa-9d37-f44453e994d8\") " pod="openshift-console/console-f9d7485db-dc8cp" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.685932 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/d18be0a7-2aaa-4aaa-9d37-f44453e994d8-oauth-serving-cert\") pod \"console-f9d7485db-dc8cp\" (UID: \"d18be0a7-2aaa-4aaa-9d37-f44453e994d8\") " pod="openshift-console/console-f9d7485db-dc8cp" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.686135 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/83732716-a7c1-4be4-a73a-1d7a9633af29-config\") pod \"etcd-operator-b45778765-75lmn\" (UID: \"83732716-a7c1-4be4-a73a-1d7a9633af29\") " pod="openshift-etcd-operator/etcd-operator-b45778765-75lmn" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.686309 4663 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/83732716-a7c1-4be4-a73a-1d7a9633af29-etcd-client\") pod \"etcd-operator-b45778765-75lmn\" (UID: \"83732716-a7c1-4be4-a73a-1d7a9633af29\") " pod="openshift-etcd-operator/etcd-operator-b45778765-75lmn" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.686361 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/507a4649-4e86-435b-8ceb-9587a90bc48c-tmpfs\") pod \"packageserver-d55dfcdfc-25wmw\" (UID: \"507a4649-4e86-435b-8ceb-9587a90bc48c\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-25wmw" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.687591 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1fc12aa7-09ac-4720-b32e-920301ca04ee-auth-proxy-config\") pod \"machine-config-operator-74547568cd-fdg9t\" (UID: \"1fc12aa7-09ac-4720-b32e-920301ca04ee\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fdg9t" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.687872 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.688333 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/d18be0a7-2aaa-4aaa-9d37-f44453e994d8-console-serving-cert\") pod \"console-f9d7485db-dc8cp\" (UID: \"d18be0a7-2aaa-4aaa-9d37-f44453e994d8\") " pod="openshift-console/console-f9d7485db-dc8cp" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.688781 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/d18be0a7-2aaa-4aaa-9d37-f44453e994d8-console-oauth-config\") pod \"console-f9d7485db-dc8cp\" (UID: \"d18be0a7-2aaa-4aaa-9d37-f44453e994d8\") " pod="openshift-console/console-f9d7485db-dc8cp" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.688942 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/58945b3f-8314-4251-a1e1-20dff39bf316-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-kw96j\" (UID: \"58945b3f-8314-4251-a1e1-20dff39bf316\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-kw96j" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.689277 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/83732716-a7c1-4be4-a73a-1d7a9633af29-serving-cert\") pod \"etcd-operator-b45778765-75lmn\" (UID: \"83732716-a7c1-4be4-a73a-1d7a9633af29\") " pod="openshift-etcd-operator/etcd-operator-b45778765-75lmn" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.695487 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/3a676c5e-7428-4f37-9aa2-0755611c6ff3-stats-auth\") pod \"router-default-5444994796-n9pwg\" (UID: \"3a676c5e-7428-4f37-9aa2-0755611c6ff3\") " pod="openshift-ingress/router-default-5444994796-n9pwg" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.701734 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2bb618f2-adb1-40ff-a336-2520519233b7-serving-cert\") pod 
\"openshift-apiserver-operator-796bbdcf4f-x8ln7\" (UID: \"2bb618f2-adb1-40ff-a336-2520519233b7\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-x8ln7" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.706122 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.727059 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.738421 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3a676c5e-7428-4f37-9aa2-0755611c6ff3-metrics-certs\") pod \"router-default-5444994796-n9pwg\" (UID: \"3a676c5e-7428-4f37-9aa2-0755611c6ff3\") " pod="openshift-ingress/router-default-5444994796-n9pwg" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.746757 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.766652 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.778094 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3a676c5e-7428-4f37-9aa2-0755611c6ff3-service-ca-bundle\") pod \"router-default-5444994796-n9pwg\" (UID: \"3a676c5e-7428-4f37-9aa2-0755611c6ff3\") " pod="openshift-ingress/router-default-5444994796-n9pwg" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.786505 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.806220 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.826620 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.838800 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/3a676c5e-7428-4f37-9aa2-0755611c6ff3-default-certificate\") pod \"router-default-5444994796-n9pwg\" (UID: \"3a676c5e-7428-4f37-9aa2-0755611c6ff3\") " pod="openshift-ingress/router-default-5444994796-n9pwg" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.846069 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.866135 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.886140 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.906125 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.927074 4663 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-ingress-operator"/"metrics-tls" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.939468 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/44eee7e7-6cdd-40bf-a998-ed2c4ae7884a-metrics-tls\") pod \"ingress-operator-5b745b69d9-p968m\" (UID: \"44eee7e7-6cdd-40bf-a998-ed2c4ae7884a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-p968m" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.947672 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.972764 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.978106 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/44eee7e7-6cdd-40bf-a998-ed2c4ae7884a-trusted-ca\") pod \"ingress-operator-5b745b69d9-p968m\" (UID: \"44eee7e7-6cdd-40bf-a998-ed2c4ae7884a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-p968m" Dec 11 10:20:02 crc kubenswrapper[4663]: I1211 10:20:02.986034 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.006088 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.017043 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2a76c011-1596-4893-b01f-abf8e6e5295c-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-9kh7f\" (UID: \"2a76c011-1596-4893-b01f-abf8e6e5295c\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9kh7f" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.027216 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.035755 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2a76c011-1596-4893-b01f-abf8e6e5295c-config\") pod \"kube-apiserver-operator-766d6c64bb-9kh7f\" (UID: \"2a76c011-1596-4893-b01f-abf8e6e5295c\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9kh7f" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.046894 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.055043 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc42c87c-f305-40f2-8d38-c9ef45c05bd0-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-nn574\" (UID: \"fc42c87c-f305-40f2-8d38-c9ef45c05bd0\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-nn574" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.066936 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 
10:20:03.087315 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.107171 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.118940 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fc42c87c-f305-40f2-8d38-c9ef45c05bd0-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-nn574\" (UID: \"fc42c87c-f305-40f2-8d38-c9ef45c05bd0\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-nn574" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.126276 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.147488 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.166118 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.186501 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.206030 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.226145 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.247810 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.260196 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/05009b2f-f136-4f9d-8d58-169816bb1ea6-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-24p95\" (UID: \"05009b2f-f136-4f9d-8d58-169816bb1ea6\") " pod="openshift-marketplace/marketplace-operator-79b997595-24p95" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.271840 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.273362 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/05009b2f-f136-4f9d-8d58-169816bb1ea6-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-24p95\" (UID: \"05009b2f-f136-4f9d-8d58-169816bb1ea6\") " pod="openshift-marketplace/marketplace-operator-79b997595-24p95" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.286497 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.306006 4663 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.312765 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/1fc12aa7-09ac-4720-b32e-920301ca04ee-images\") pod \"machine-config-operator-74547568cd-fdg9t\" (UID: \"1fc12aa7-09ac-4720-b32e-920301ca04ee\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fdg9t" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.326525 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.345799 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.356873 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/1fc12aa7-09ac-4720-b32e-920301ca04ee-proxy-tls\") pod \"machine-config-operator-74547568cd-fdg9t\" (UID: \"1fc12aa7-09ac-4720-b32e-920301ca04ee\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fdg9t" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.366448 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.379522 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/44909f31-73c1-4282-a2f6-e93bc1bf6d32-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-rw7qg\" (UID: \"44909f31-73c1-4282-a2f6-e93bc1bf6d32\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-rw7qg" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.386980 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.407445 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.426172 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.435482 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/507a4649-4e86-435b-8ceb-9587a90bc48c-webhook-cert\") pod \"packageserver-d55dfcdfc-25wmw\" (UID: \"507a4649-4e86-435b-8ceb-9587a90bc48c\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-25wmw" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.438394 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/507a4649-4e86-435b-8ceb-9587a90bc48c-apiservice-cert\") pod \"packageserver-d55dfcdfc-25wmw\" (UID: \"507a4649-4e86-435b-8ceb-9587a90bc48c\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-25wmw" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.447253 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 11 10:20:03 crc 
kubenswrapper[4663]: I1211 10:20:03.465196 4663 request.go:700] Waited for 1.013037764s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-operator-lifecycle-manager/configmaps?fieldSelector=metadata.name%3Dopenshift-service-ca.crt&limit=500&resourceVersion=0 Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.466345 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.486292 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.506590 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.518298 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/32a7449e-b310-406a-b03a-4e2e461d11ae-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-fwjxr\" (UID: \"32a7449e-b310-406a-b03a-4e2e461d11ae\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fwjxr" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.526520 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.546144 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.566140 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 11 10:20:03 crc kubenswrapper[4663]: E1211 10:20:03.566979 4663 secret.go:188] Couldn't get secret openshift-route-controller-manager/serving-cert: failed to sync secret cache: timed out waiting for the condition Dec 11 10:20:03 crc kubenswrapper[4663]: E1211 10:20:03.567058 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c7f23b6b-b7a1-46f4-b553-37be88f8fae0-serving-cert podName:c7f23b6b-b7a1-46f4-b553-37be88f8fae0 nodeName:}" failed. No retries permitted until 2025-12-11 10:20:04.067038009 +0000 UTC m=+134.884967793 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/c7f23b6b-b7a1-46f4-b553-37be88f8fae0-serving-cert") pod "route-controller-manager-6576b87f9c-zvj6k" (UID: "c7f23b6b-b7a1-46f4-b553-37be88f8fae0") : failed to sync secret cache: timed out waiting for the condition Dec 11 10:20:03 crc kubenswrapper[4663]: E1211 10:20:03.573144 4663 configmap.go:193] Couldn't get configMap openshift-route-controller-manager/client-ca: failed to sync configmap cache: timed out waiting for the condition Dec 11 10:20:03 crc kubenswrapper[4663]: E1211 10:20:03.573248 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/c7f23b6b-b7a1-46f4-b553-37be88f8fae0-client-ca podName:c7f23b6b-b7a1-46f4-b553-37be88f8fae0 nodeName:}" failed. 
No retries permitted until 2025-12-11 10:20:04.073228825 +0000 UTC m=+134.891158619 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "client-ca" (UniqueName: "kubernetes.io/configmap/c7f23b6b-b7a1-46f4-b553-37be88f8fae0-client-ca") pod "route-controller-manager-6576b87f9c-zvj6k" (UID: "c7f23b6b-b7a1-46f4-b553-37be88f8fae0") : failed to sync configmap cache: timed out waiting for the condition Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.575595 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3885a18d-c5e3-484e-8950-878e57b1bb43-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-zwlfp\" (UID: \"3885a18d-c5e3-484e-8950-878e57b1bb43\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zwlfp" Dec 11 10:20:03 crc kubenswrapper[4663]: E1211 10:20:03.577611 4663 configmap.go:193] Couldn't get configMap openshift-route-controller-manager/config: failed to sync configmap cache: timed out waiting for the condition Dec 11 10:20:03 crc kubenswrapper[4663]: E1211 10:20:03.577671 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/c7f23b6b-b7a1-46f4-b553-37be88f8fae0-config podName:c7f23b6b-b7a1-46f4-b553-37be88f8fae0 nodeName:}" failed. No retries permitted until 2025-12-11 10:20:04.077660284 +0000 UTC m=+134.895590068 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/c7f23b6b-b7a1-46f4-b553-37be88f8fae0-config") pod "route-controller-manager-6576b87f9c-zvj6k" (UID: "c7f23b6b-b7a1-46f4-b553-37be88f8fae0") : failed to sync configmap cache: timed out waiting for the condition Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.586751 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.595992 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3885a18d-c5e3-484e-8950-878e57b1bb43-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-zwlfp\" (UID: \"3885a18d-c5e3-484e-8950-878e57b1bb43\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zwlfp" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.606070 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.626021 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.636128 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/9813fa82-894c-4380-8975-24771adf79e7-profile-collector-cert\") pod \"catalog-operator-68c6474976-zxbz5\" (UID: \"9813fa82-894c-4380-8975-24771adf79e7\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zxbz5" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.636315 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f64a0f1e-3264-4862-ad3b-097bf139855c-profile-collector-cert\") pod \"olm-operator-6b444d44fb-zzlmb\" (UID: 
\"f64a0f1e-3264-4862-ad3b-097bf139855c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zzlmb" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.638129 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b15591c3-541e-4daf-8e21-9b69db5cf260-secret-volume\") pod \"collect-profiles-29424135-v2t6r\" (UID: \"b15591c3-541e-4daf-8e21-9b69db5cf260\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424135-v2t6r" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.646353 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.656263 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f64a0f1e-3264-4862-ad3b-097bf139855c-srv-cert\") pod \"olm-operator-6b444d44fb-zzlmb\" (UID: \"f64a0f1e-3264-4862-ad3b-097bf139855c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zzlmb" Dec 11 10:20:03 crc kubenswrapper[4663]: E1211 10:20:03.683582 4663 secret.go:188] Couldn't get secret openshift-service-ca-operator/serving-cert: failed to sync secret cache: timed out waiting for the condition Dec 11 10:20:03 crc kubenswrapper[4663]: E1211 10:20:03.683656 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a35ea846-6e87-40fe-8998-037fb7bd2060-serving-cert podName:a35ea846-6e87-40fe-8998-037fb7bd2060 nodeName:}" failed. No retries permitted until 2025-12-11 10:20:04.183639075 +0000 UTC m=+135.001568859 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/a35ea846-6e87-40fe-8998-037fb7bd2060-serving-cert") pod "service-ca-operator-777779d784-8mrgz" (UID: "a35ea846-6e87-40fe-8998-037fb7bd2060") : failed to sync secret cache: timed out waiting for the condition Dec 11 10:20:03 crc kubenswrapper[4663]: E1211 10:20:03.683810 4663 secret.go:188] Couldn't get secret openshift-operator-lifecycle-manager/catalog-operator-serving-cert: failed to sync secret cache: timed out waiting for the condition Dec 11 10:20:03 crc kubenswrapper[4663]: E1211 10:20:03.683873 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9813fa82-894c-4380-8975-24771adf79e7-srv-cert podName:9813fa82-894c-4380-8975-24771adf79e7 nodeName:}" failed. No retries permitted until 2025-12-11 10:20:04.183856942 +0000 UTC m=+135.001786726 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "srv-cert" (UniqueName: "kubernetes.io/secret/9813fa82-894c-4380-8975-24771adf79e7-srv-cert") pod "catalog-operator-68c6474976-zxbz5" (UID: "9813fa82-894c-4380-8975-24771adf79e7") : failed to sync secret cache: timed out waiting for the condition Dec 11 10:20:03 crc kubenswrapper[4663]: E1211 10:20:03.683900 4663 configmap.go:193] Couldn't get configMap openshift-service-ca/signing-cabundle: failed to sync configmap cache: timed out waiting for the condition Dec 11 10:20:03 crc kubenswrapper[4663]: E1211 10:20:03.683940 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d87db89f-40fd-4e1c-a959-e167edef0c19-signing-cabundle podName:d87db89f-40fd-4e1c-a959-e167edef0c19 nodeName:}" failed. No retries permitted until 2025-12-11 10:20:04.183926965 +0000 UTC m=+135.001856739 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "signing-cabundle" (UniqueName: "kubernetes.io/configmap/d87db89f-40fd-4e1c-a959-e167edef0c19-signing-cabundle") pod "service-ca-9c57cc56f-bf8b4" (UID: "d87db89f-40fd-4e1c-a959-e167edef0c19") : failed to sync configmap cache: timed out waiting for the condition Dec 11 10:20:03 crc kubenswrapper[4663]: E1211 10:20:03.685579 4663 secret.go:188] Couldn't get secret openshift-machine-config-operator/node-bootstrapper-token: failed to sync secret cache: timed out waiting for the condition Dec 11 10:20:03 crc kubenswrapper[4663]: E1211 10:20:03.685614 4663 secret.go:188] Couldn't get secret openshift-operator-lifecycle-manager/package-server-manager-serving-cert: failed to sync secret cache: timed out waiting for the condition Dec 11 10:20:03 crc kubenswrapper[4663]: E1211 10:20:03.685640 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ffff689f-0318-485f-9f8c-a8f2f86eebaf-node-bootstrap-token podName:ffff689f-0318-485f-9f8c-a8f2f86eebaf nodeName:}" failed. No retries permitted until 2025-12-11 10:20:04.185621601 +0000 UTC m=+135.003551385 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "node-bootstrap-token" (UniqueName: "kubernetes.io/secret/ffff689f-0318-485f-9f8c-a8f2f86eebaf-node-bootstrap-token") pod "machine-config-server-nfh76" (UID: "ffff689f-0318-485f-9f8c-a8f2f86eebaf") : failed to sync secret cache: timed out waiting for the condition Dec 11 10:20:03 crc kubenswrapper[4663]: E1211 10:20:03.685583 4663 configmap.go:193] Couldn't get configMap openshift-operator-lifecycle-manager/collect-profiles-config: failed to sync configmap cache: timed out waiting for the condition Dec 11 10:20:03 crc kubenswrapper[4663]: E1211 10:20:03.685661 4663 configmap.go:193] Couldn't get configMap openshift-service-ca-operator/service-ca-operator-config: failed to sync configmap cache: timed out waiting for the condition Dec 11 10:20:03 crc kubenswrapper[4663]: E1211 10:20:03.685671 4663 secret.go:188] Couldn't get secret openshift-dns-operator/metrics-tls: failed to sync secret cache: timed out waiting for the condition Dec 11 10:20:03 crc kubenswrapper[4663]: E1211 10:20:03.685684 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/10c2efee-1b70-4258-92f0-caba3a5cfd50-package-server-manager-serving-cert podName:10c2efee-1b70-4258-92f0-caba3a5cfd50 nodeName:}" failed. No retries permitted until 2025-12-11 10:20:04.185666683 +0000 UTC m=+135.003596467 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "package-server-manager-serving-cert" (UniqueName: "kubernetes.io/secret/10c2efee-1b70-4258-92f0-caba3a5cfd50-package-server-manager-serving-cert") pod "package-server-manager-789f6589d5-jnb74" (UID: "10c2efee-1b70-4258-92f0-caba3a5cfd50") : failed to sync secret cache: timed out waiting for the condition Dec 11 10:20:03 crc kubenswrapper[4663]: E1211 10:20:03.685723 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/b15591c3-541e-4daf-8e21-9b69db5cf260-config-volume podName:b15591c3-541e-4daf-8e21-9b69db5cf260 nodeName:}" failed. No retries permitted until 2025-12-11 10:20:04.185715955 +0000 UTC m=+135.003645739 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "config-volume" (UniqueName: "kubernetes.io/configmap/b15591c3-541e-4daf-8e21-9b69db5cf260-config-volume") pod "collect-profiles-29424135-v2t6r" (UID: "b15591c3-541e-4daf-8e21-9b69db5cf260") : failed to sync configmap cache: timed out waiting for the condition Dec 11 10:20:03 crc kubenswrapper[4663]: E1211 10:20:03.685735 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/a35ea846-6e87-40fe-8998-037fb7bd2060-config podName:a35ea846-6e87-40fe-8998-037fb7bd2060 nodeName:}" failed. No retries permitted until 2025-12-11 10:20:04.185729412 +0000 UTC m=+135.003659196 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/a35ea846-6e87-40fe-8998-037fb7bd2060-config") pod "service-ca-operator-777779d784-8mrgz" (UID: "a35ea846-6e87-40fe-8998-037fb7bd2060") : failed to sync configmap cache: timed out waiting for the condition Dec 11 10:20:03 crc kubenswrapper[4663]: E1211 10:20:03.685747 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/905be650-0ed4-45ef-984c-00ab31ee178b-metrics-tls podName:905be650-0ed4-45ef-984c-00ab31ee178b nodeName:}" failed. No retries permitted until 2025-12-11 10:20:04.185742919 +0000 UTC m=+135.003672703 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-tls" (UniqueName: "kubernetes.io/secret/905be650-0ed4-45ef-984c-00ab31ee178b-metrics-tls") pod "dns-operator-744455d44c-cb2kn" (UID: "905be650-0ed4-45ef-984c-00ab31ee178b") : failed to sync secret cache: timed out waiting for the condition Dec 11 10:20:03 crc kubenswrapper[4663]: E1211 10:20:03.685789 4663 secret.go:188] Couldn't get secret openshift-service-ca/signing-key: failed to sync secret cache: timed out waiting for the condition Dec 11 10:20:03 crc kubenswrapper[4663]: E1211 10:20:03.685825 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d87db89f-40fd-4e1c-a959-e167edef0c19-signing-key podName:d87db89f-40fd-4e1c-a959-e167edef0c19 nodeName:}" failed. No retries permitted until 2025-12-11 10:20:04.185810608 +0000 UTC m=+135.003740392 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "signing-key" (UniqueName: "kubernetes.io/secret/d87db89f-40fd-4e1c-a959-e167edef0c19-signing-key") pod "service-ca-9c57cc56f-bf8b4" (UID: "d87db89f-40fd-4e1c-a959-e167edef0c19") : failed to sync secret cache: timed out waiting for the condition Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.686622 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 11 10:20:03 crc kubenswrapper[4663]: E1211 10:20:03.686633 4663 secret.go:188] Couldn't get secret openshift-machine-config-operator/mcc-proxy-tls: failed to sync secret cache: timed out waiting for the condition Dec 11 10:20:03 crc kubenswrapper[4663]: E1211 10:20:03.686662 4663 secret.go:188] Couldn't get secret openshift-machine-config-operator/machine-config-server-tls: failed to sync secret cache: timed out waiting for the condition Dec 11 10:20:03 crc kubenswrapper[4663]: E1211 10:20:03.686671 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b16defcf-1335-4210-a647-bcaef5e0642f-proxy-tls podName:b16defcf-1335-4210-a647-bcaef5e0642f nodeName:}" failed. No retries permitted until 2025-12-11 10:20:04.18666008 +0000 UTC m=+135.004589864 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "proxy-tls" (UniqueName: "kubernetes.io/secret/b16defcf-1335-4210-a647-bcaef5e0642f-proxy-tls") pod "machine-config-controller-84d6567774-pnbrx" (UID: "b16defcf-1335-4210-a647-bcaef5e0642f") : failed to sync secret cache: timed out waiting for the condition Dec 11 10:20:03 crc kubenswrapper[4663]: E1211 10:20:03.686716 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ffff689f-0318-485f-9f8c-a8f2f86eebaf-certs podName:ffff689f-0318-485f-9f8c-a8f2f86eebaf nodeName:}" failed. No retries permitted until 2025-12-11 10:20:04.186706777 +0000 UTC m=+135.004636560 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "certs" (UniqueName: "kubernetes.io/secret/ffff689f-0318-485f-9f8c-a8f2f86eebaf-certs") pod "machine-config-server-nfh76" (UID: "ffff689f-0318-485f-9f8c-a8f2f86eebaf") : failed to sync secret cache: timed out waiting for the condition Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.706284 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.726744 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.746772 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.766527 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.786674 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.805865 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.826634 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.845945 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.866069 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.886702 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.906634 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.926075 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.946421 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 10:20:03.966927 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 11 10:20:03 crc kubenswrapper[4663]: I1211 
10:20:03.986297 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.006944 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.026218 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.046965 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.066375 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.086866 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.106541 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c7f23b6b-b7a1-46f4-b553-37be88f8fae0-client-ca\") pod \"route-controller-manager-6576b87f9c-zvj6k\" (UID: \"c7f23b6b-b7a1-46f4-b553-37be88f8fae0\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvj6k" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.106664 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c7f23b6b-b7a1-46f4-b553-37be88f8fae0-config\") pod \"route-controller-manager-6576b87f9c-zvj6k\" (UID: \"c7f23b6b-b7a1-46f4-b553-37be88f8fae0\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvj6k" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.106707 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c7f23b6b-b7a1-46f4-b553-37be88f8fae0-serving-cert\") pod \"route-controller-manager-6576b87f9c-zvj6k\" (UID: \"c7f23b6b-b7a1-46f4-b553-37be88f8fae0\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvj6k" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.107651 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.126109 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.147271 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.166887 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.186722 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.207514 4663 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/d87db89f-40fd-4e1c-a959-e167edef0c19-signing-key\") pod \"service-ca-9c57cc56f-bf8b4\" (UID: \"d87db89f-40fd-4e1c-a959-e167edef0c19\") " pod="openshift-service-ca/service-ca-9c57cc56f-bf8b4" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.207559 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/10c2efee-1b70-4258-92f0-caba3a5cfd50-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-jnb74\" (UID: \"10c2efee-1b70-4258-92f0-caba3a5cfd50\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-jnb74" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.207617 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/ffff689f-0318-485f-9f8c-a8f2f86eebaf-certs\") pod \"machine-config-server-nfh76\" (UID: \"ffff689f-0318-485f-9f8c-a8f2f86eebaf\") " pod="openshift-machine-config-operator/machine-config-server-nfh76" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.207708 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/9813fa82-894c-4380-8975-24771adf79e7-srv-cert\") pod \"catalog-operator-68c6474976-zxbz5\" (UID: \"9813fa82-894c-4380-8975-24771adf79e7\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zxbz5" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.207740 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/d87db89f-40fd-4e1c-a959-e167edef0c19-signing-cabundle\") pod \"service-ca-9c57cc56f-bf8b4\" (UID: \"d87db89f-40fd-4e1c-a959-e167edef0c19\") " pod="openshift-service-ca/service-ca-9c57cc56f-bf8b4" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.207756 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a35ea846-6e87-40fe-8998-037fb7bd2060-serving-cert\") pod \"service-ca-operator-777779d784-8mrgz\" (UID: \"a35ea846-6e87-40fe-8998-037fb7bd2060\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-8mrgz" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.207821 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b15591c3-541e-4daf-8e21-9b69db5cf260-config-volume\") pod \"collect-profiles-29424135-v2t6r\" (UID: \"b15591c3-541e-4daf-8e21-9b69db5cf260\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424135-v2t6r" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.207902 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/ffff689f-0318-485f-9f8c-a8f2f86eebaf-node-bootstrap-token\") pod \"machine-config-server-nfh76\" (UID: \"ffff689f-0318-485f-9f8c-a8f2f86eebaf\") " pod="openshift-machine-config-operator/machine-config-server-nfh76" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.207936 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a35ea846-6e87-40fe-8998-037fb7bd2060-config\") pod \"service-ca-operator-777779d784-8mrgz\" (UID: 
\"a35ea846-6e87-40fe-8998-037fb7bd2060\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-8mrgz" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.207951 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/905be650-0ed4-45ef-984c-00ab31ee178b-metrics-tls\") pod \"dns-operator-744455d44c-cb2kn\" (UID: \"905be650-0ed4-45ef-984c-00ab31ee178b\") " pod="openshift-dns-operator/dns-operator-744455d44c-cb2kn" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.207989 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/b16defcf-1335-4210-a647-bcaef5e0642f-proxy-tls\") pod \"machine-config-controller-84d6567774-pnbrx\" (UID: \"b16defcf-1335-4210-a647-bcaef5e0642f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-pnbrx" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.209324 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a35ea846-6e87-40fe-8998-037fb7bd2060-config\") pod \"service-ca-operator-777779d784-8mrgz\" (UID: \"a35ea846-6e87-40fe-8998-037fb7bd2060\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-8mrgz" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.209419 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b15591c3-541e-4daf-8e21-9b69db5cf260-config-volume\") pod \"collect-profiles-29424135-v2t6r\" (UID: \"b15591c3-541e-4daf-8e21-9b69db5cf260\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424135-v2t6r" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.209794 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/d87db89f-40fd-4e1c-a959-e167edef0c19-signing-cabundle\") pod \"service-ca-9c57cc56f-bf8b4\" (UID: \"d87db89f-40fd-4e1c-a959-e167edef0c19\") " pod="openshift-service-ca/service-ca-9c57cc56f-bf8b4" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.211304 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a35ea846-6e87-40fe-8998-037fb7bd2060-serving-cert\") pod \"service-ca-operator-777779d784-8mrgz\" (UID: \"a35ea846-6e87-40fe-8998-037fb7bd2060\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-8mrgz" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.211514 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/b16defcf-1335-4210-a647-bcaef5e0642f-proxy-tls\") pod \"machine-config-controller-84d6567774-pnbrx\" (UID: \"b16defcf-1335-4210-a647-bcaef5e0642f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-pnbrx" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.211675 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/905be650-0ed4-45ef-984c-00ab31ee178b-metrics-tls\") pod \"dns-operator-744455d44c-cb2kn\" (UID: \"905be650-0ed4-45ef-984c-00ab31ee178b\") " pod="openshift-dns-operator/dns-operator-744455d44c-cb2kn" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.212170 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: 
\"kubernetes.io/secret/9813fa82-894c-4380-8975-24771adf79e7-srv-cert\") pod \"catalog-operator-68c6474976-zxbz5\" (UID: \"9813fa82-894c-4380-8975-24771adf79e7\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zxbz5" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.212367 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/10c2efee-1b70-4258-92f0-caba3a5cfd50-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-jnb74\" (UID: \"10c2efee-1b70-4258-92f0-caba3a5cfd50\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-jnb74" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.212447 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/d87db89f-40fd-4e1c-a959-e167edef0c19-signing-key\") pod \"service-ca-9c57cc56f-bf8b4\" (UID: \"d87db89f-40fd-4e1c-a959-e167edef0c19\") " pod="openshift-service-ca/service-ca-9c57cc56f-bf8b4" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.212518 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/ffff689f-0318-485f-9f8c-a8f2f86eebaf-certs\") pod \"machine-config-server-nfh76\" (UID: \"ffff689f-0318-485f-9f8c-a8f2f86eebaf\") " pod="openshift-machine-config-operator/machine-config-server-nfh76" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.212734 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/ffff689f-0318-485f-9f8c-a8f2f86eebaf-node-bootstrap-token\") pod \"machine-config-server-nfh76\" (UID: \"ffff689f-0318-485f-9f8c-a8f2f86eebaf\") " pod="openshift-machine-config-operator/machine-config-server-nfh76" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.226801 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.247207 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.266589 4663 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.286849 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.306004 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.326653 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.346930 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.366518 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.387475 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.406095 
4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.439359 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hf224\" (UniqueName: \"kubernetes.io/projected/99b2469b-932c-4a11-be99-ace72cff3d56-kube-api-access-hf224\") pod \"console-operator-58897d9998-xhhrv\" (UID: \"99b2469b-932c-4a11-be99-ace72cff3d56\") " pod="openshift-console-operator/console-operator-58897d9998-xhhrv" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.459940 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z9bdr\" (UniqueName: \"kubernetes.io/projected/26fa94f3-6f45-4641-a0b5-b027b7db54c3-kube-api-access-z9bdr\") pod \"apiserver-76f77b778f-9cvs5\" (UID: \"26fa94f3-6f45-4641-a0b5-b027b7db54c3\") " pod="openshift-apiserver/apiserver-76f77b778f-9cvs5" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.465526 4663 request.go:700] Waited for 1.893269676s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication-operator/serviceaccounts/authentication-operator/token Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.477747 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l5vgk\" (UniqueName: \"kubernetes.io/projected/7e2ab408-9ac3-4e29-aed5-42d77f94ad50-kube-api-access-l5vgk\") pod \"authentication-operator-69f744f599-cbp5t\" (UID: \"7e2ab408-9ac3-4e29-aed5-42d77f94ad50\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-cbp5t" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.498042 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5jgl4\" (UniqueName: \"kubernetes.io/projected/1001a5ac-b44d-41c2-979f-a987c9c6d63f-kube-api-access-5jgl4\") pod \"machine-api-operator-5694c8668f-g29lh\" (UID: \"1001a5ac-b44d-41c2-979f-a987c9c6d63f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-g29lh" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.513840 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-9cvs5" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.516457 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-xhhrv" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.519132 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fslg7\" (UniqueName: \"kubernetes.io/projected/14c24a3b-a966-47ff-a122-50abb423e4b3-kube-api-access-fslg7\") pod \"cluster-samples-operator-665b6dd947-mzhff\" (UID: \"14c24a3b-a966-47ff-a122-50abb423e4b3\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mzhff" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.538098 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dztlb\" (UniqueName: \"kubernetes.io/projected/2c2ceeb7-b3cc-4085-8591-0e0c42a01c34-kube-api-access-dztlb\") pod \"apiserver-7bbb656c7d-hfmnw\" (UID: \"2c2ceeb7-b3cc-4085-8591-0e0c42a01c34\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hfmnw" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.562608 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lhpvj\" (UniqueName: \"kubernetes.io/projected/b278aa37-9fc1-430d-9182-2dbf0f56b930-kube-api-access-lhpvj\") pod \"machine-approver-56656f9798-26pzr\" (UID: \"b278aa37-9fc1-430d-9182-2dbf0f56b930\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-26pzr" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.574647 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-cbp5t" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.581402 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ktlbb\" (UniqueName: \"kubernetes.io/projected/e3cf153e-0d08-489d-8d35-1170356831ee-kube-api-access-ktlbb\") pod \"openshift-config-operator-7777fb866f-hvrvb\" (UID: \"e3cf153e-0d08-489d-8d35-1170356831ee\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-hvrvb" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.582238 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mzhff" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.597377 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-g29lh" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.612105 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hfmnw" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.625688 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x6rl2\" (UniqueName: \"kubernetes.io/projected/14f276e8-14d3-4e67-956d-ad6212f45c69-kube-api-access-x6rl2\") pod \"controller-manager-879f6c89f-n6dwq\" (UID: \"14f276e8-14d3-4e67-956d-ad6212f45c69\") " pod="openshift-controller-manager/controller-manager-879f6c89f-n6dwq" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.640446 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w9g9k\" (UniqueName: \"kubernetes.io/projected/a4cc9117-6286-44ba-a590-893cff694a7c-kube-api-access-w9g9k\") pod \"oauth-openshift-558db77b4-cgckf\" (UID: \"a4cc9117-6286-44ba-a590-893cff694a7c\") " pod="openshift-authentication/oauth-openshift-558db77b4-cgckf" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.660426 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jnlld\" (UniqueName: \"kubernetes.io/projected/46f7ebf5-734d-41f6-a8eb-1daf1dc0e014-kube-api-access-jnlld\") pod \"downloads-7954f5f757-mn2pm\" (UID: \"46f7ebf5-734d-41f6-a8eb-1daf1dc0e014\") " pod="openshift-console/downloads-7954f5f757-mn2pm" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.680456 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fc42c87c-f305-40f2-8d38-c9ef45c05bd0-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-nn574\" (UID: \"fc42c87c-f305-40f2-8d38-c9ef45c05bd0\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-nn574" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.696613 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-9cvs5"] Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.706810 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c2txq\" (UniqueName: \"kubernetes.io/projected/3885a18d-c5e3-484e-8950-878e57b1bb43-kube-api-access-c2txq\") pod \"kube-storage-version-migrator-operator-b67b599dd-zwlfp\" (UID: \"3885a18d-c5e3-484e-8950-878e57b1bb43\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zwlfp" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.710422 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-nn574" Dec 11 10:20:04 crc kubenswrapper[4663]: W1211 10:20:04.716285 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod26fa94f3_6f45_4641_a0b5_b027b7db54c3.slice/crio-30332871c2674c97213f164c3b17bbcfb5a3e4d96309be792e335d47eef1492c WatchSource:0}: Error finding container 30332871c2674c97213f164c3b17bbcfb5a3e4d96309be792e335d47eef1492c: Status 404 returned error can't find the container with id 30332871c2674c97213f164c3b17bbcfb5a3e4d96309be792e335d47eef1492c Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.729010 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-xhhrv"] Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.730279 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-746jm\" (UniqueName: \"kubernetes.io/projected/d87db89f-40fd-4e1c-a959-e167edef0c19-kube-api-access-746jm\") pod \"service-ca-9c57cc56f-bf8b4\" (UID: \"d87db89f-40fd-4e1c-a959-e167edef0c19\") " pod="openshift-service-ca/service-ca-9c57cc56f-bf8b4" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.748649 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bhrrz\" (UniqueName: \"kubernetes.io/projected/10c2efee-1b70-4258-92f0-caba3a5cfd50-kube-api-access-bhrrz\") pod \"package-server-manager-789f6589d5-jnb74\" (UID: \"10c2efee-1b70-4258-92f0-caba3a5cfd50\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-jnb74" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.751091 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zwlfp" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.757806 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dwbrw\" (UniqueName: \"kubernetes.io/projected/b15591c3-541e-4daf-8e21-9b69db5cf260-kube-api-access-dwbrw\") pod \"collect-profiles-29424135-v2t6r\" (UID: \"b15591c3-541e-4daf-8e21-9b69db5cf260\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424135-v2t6r" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.766720 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-cbp5t"] Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.767711 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-n6dwq" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.772221 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-bf8b4" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.777153 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-hvrvb" Dec 11 10:20:04 crc kubenswrapper[4663]: W1211 10:20:04.777756 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7e2ab408_9ac3_4e29_aed5_42d77f94ad50.slice/crio-d2c43de3a15501f06a87f0a4435b4c02899639f93e9750cefb8bc950d5fa9e7c WatchSource:0}: Error finding container d2c43de3a15501f06a87f0a4435b4c02899639f93e9750cefb8bc950d5fa9e7c: Status 404 returned error can't find the container with id d2c43de3a15501f06a87f0a4435b4c02899639f93e9750cefb8bc950d5fa9e7c Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.781015 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6pvvg\" (UniqueName: \"kubernetes.io/projected/9813fa82-894c-4380-8975-24771adf79e7-kube-api-access-6pvvg\") pod \"catalog-operator-68c6474976-zxbz5\" (UID: \"9813fa82-894c-4380-8975-24771adf79e7\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zxbz5" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.807896 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xb77h\" (UniqueName: \"kubernetes.io/projected/3a676c5e-7428-4f37-9aa2-0755611c6ff3-kube-api-access-xb77h\") pod \"router-default-5444994796-n9pwg\" (UID: \"3a676c5e-7428-4f37-9aa2-0755611c6ff3\") " pod="openshift-ingress/router-default-5444994796-n9pwg" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.808707 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-jnb74" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.809255 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-hfmnw"] Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.821098 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-st982\" (UniqueName: \"kubernetes.io/projected/83732716-a7c1-4be4-a73a-1d7a9633af29-kube-api-access-st982\") pod \"etcd-operator-b45778765-75lmn\" (UID: \"83732716-a7c1-4be4-a73a-1d7a9633af29\") " pod="openshift-etcd-operator/etcd-operator-b45778765-75lmn" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.828399 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zxbz5" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.828816 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-g29lh"] Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.841149 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q4fd2\" (UniqueName: \"kubernetes.io/projected/58945b3f-8314-4251-a1e1-20dff39bf316-kube-api-access-q4fd2\") pod \"cluster-image-registry-operator-dc59b4c8b-kw96j\" (UID: \"58945b3f-8314-4251-a1e1-20dff39bf316\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-kw96j" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.848101 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mzhff"] Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.855572 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-26pzr" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.860819 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ght7c\" (UniqueName: \"kubernetes.io/projected/32a7449e-b310-406a-b03a-4e2e461d11ae-kube-api-access-ght7c\") pod \"control-plane-machine-set-operator-78cbb6b69f-fwjxr\" (UID: \"32a7449e-b310-406a-b03a-4e2e461d11ae\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fwjxr" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.866906 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-cgckf" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.881064 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-js874\" (UniqueName: \"kubernetes.io/projected/a35ea846-6e87-40fe-8998-037fb7bd2060-kube-api-access-js874\") pod \"service-ca-operator-777779d784-8mrgz\" (UID: \"a35ea846-6e87-40fe-8998-037fb7bd2060\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-8mrgz" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.889739 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424135-v2t6r" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.918033 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4q8xw\" (UniqueName: \"kubernetes.io/projected/44eee7e7-6cdd-40bf-a998-ed2c4ae7884a-kube-api-access-4q8xw\") pod \"ingress-operator-5b745b69d9-p968m\" (UID: \"44eee7e7-6cdd-40bf-a998-ed2c4ae7884a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-p968m" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.919304 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-mn2pm" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.937729 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-nn574"] Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.949210 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-skwwp\" (UniqueName: \"kubernetes.io/projected/d18be0a7-2aaa-4aaa-9d37-f44453e994d8-kube-api-access-skwwp\") pod \"console-f9d7485db-dc8cp\" (UID: \"d18be0a7-2aaa-4aaa-9d37-f44453e994d8\") " pod="openshift-console/console-f9d7485db-dc8cp" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.949740 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ngs6l\" (UniqueName: \"kubernetes.io/projected/1fc12aa7-09ac-4720-b32e-920301ca04ee-kube-api-access-ngs6l\") pod \"machine-config-operator-74547568cd-fdg9t\" (UID: \"1fc12aa7-09ac-4720-b32e-920301ca04ee\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fdg9t" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.949940 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-75lmn" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.972031 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress/router-default-5444994796-n9pwg" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.972163 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tb5nx\" (UniqueName: \"kubernetes.io/projected/44909f31-73c1-4282-a2f6-e93bc1bf6d32-kube-api-access-tb5nx\") pod \"multus-admission-controller-857f4d67dd-rw7qg\" (UID: \"44909f31-73c1-4282-a2f6-e93bc1bf6d32\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-rw7qg" Dec 11 10:20:04 crc kubenswrapper[4663]: I1211 10:20:04.997531 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wrbhv\" (UniqueName: \"kubernetes.io/projected/172795ed-fdc1-4567-ae54-f6980f7d2038-kube-api-access-wrbhv\") pod \"migrator-59844c95c7-vsdq4\" (UID: \"172795ed-fdc1-4567-ae54-f6980f7d2038\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-vsdq4" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.004039 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9cdpd\" (UniqueName: \"kubernetes.io/projected/ffff689f-0318-485f-9f8c-a8f2f86eebaf-kube-api-access-9cdpd\") pod \"machine-config-server-nfh76\" (UID: \"ffff689f-0318-485f-9f8c-a8f2f86eebaf\") " pod="openshift-machine-config-operator/machine-config-server-nfh76" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.027669 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fdg9t" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.029156 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pfxmj\" (UniqueName: \"kubernetes.io/projected/b16defcf-1335-4210-a647-bcaef5e0642f-kube-api-access-pfxmj\") pod \"machine-config-controller-84d6567774-pnbrx\" (UID: \"b16defcf-1335-4210-a647-bcaef5e0642f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-pnbrx" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.032987 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-rw7qg" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.044773 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fwjxr" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.048742 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-26pzr" event={"ID":"b278aa37-9fc1-430d-9182-2dbf0f56b930","Type":"ContainerStarted","Data":"08177af4c7be018d53971b69d59eaa3e38f573ea94dc6b7673cfef80c3d71a2e"} Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.049145 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jwbdm\" (UniqueName: \"kubernetes.io/projected/05009b2f-f136-4f9d-8d58-169816bb1ea6-kube-api-access-jwbdm\") pod \"marketplace-operator-79b997595-24p95\" (UID: \"05009b2f-f136-4f9d-8d58-169816bb1ea6\") " pod="openshift-marketplace/marketplace-operator-79b997595-24p95" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.052865 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mzhff" event={"ID":"14c24a3b-a966-47ff-a122-50abb423e4b3","Type":"ContainerStarted","Data":"da521a72a864d3b4197178015adf5aafe23aa937555782709c40c6b5b9af2c0d"} Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.055445 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-xhhrv" event={"ID":"99b2469b-932c-4a11-be99-ace72cff3d56","Type":"ContainerStarted","Data":"e7bea2ab9dd2bac682706382a9f9294f32bbad81361345ca0692ea8b57dbae61"} Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.055497 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-xhhrv" event={"ID":"99b2469b-932c-4a11-be99-ace72cff3d56","Type":"ContainerStarted","Data":"3461e36256d054a5e2d71a6625603cd451380fcc35af489a06e607c319024b59"} Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.055520 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-xhhrv" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.057662 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-9cvs5" event={"ID":"26fa94f3-6f45-4641-a0b5-b027b7db54c3","Type":"ContainerStarted","Data":"30332871c2674c97213f164c3b17bbcfb5a3e4d96309be792e335d47eef1492c"} Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.058348 4663 patch_prober.go:28] interesting pod/console-operator-58897d9998-xhhrv container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.9:8443/readyz\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body= Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.058393 4663 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-xhhrv" podUID="99b2469b-932c-4a11-be99-ace72cff3d56" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.9:8443/readyz\": dial tcp 10.217.0.9:8443: connect: connection refused" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.059535 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/44eee7e7-6cdd-40bf-a998-ed2c4ae7884a-bound-sa-token\") pod \"ingress-operator-5b745b69d9-p968m\" (UID: \"44eee7e7-6cdd-40bf-a998-ed2c4ae7884a\") " 
pod="openshift-ingress-operator/ingress-operator-5b745b69d9-p968m" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.060821 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-nn574" event={"ID":"fc42c87c-f305-40f2-8d38-c9ef45c05bd0","Type":"ContainerStarted","Data":"a1179e5627a9b7f134f5b4381cf3aa4d340adbfa1dbc2da9bf79a0f799696dc6"} Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.061817 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hfmnw" event={"ID":"2c2ceeb7-b3cc-4085-8591-0e0c42a01c34","Type":"ContainerStarted","Data":"3237c84f217fa3a982fa9dd5001055f06fccfe3e11e3b617b4945a4b5fa1d40a"} Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.063154 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-cbp5t" event={"ID":"7e2ab408-9ac3-4e29-aed5-42d77f94ad50","Type":"ContainerStarted","Data":"4ea68925a09f1440ef2a09b8ca2e4b4d28e7b676bf72e97c95a398ed760657ba"} Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.063177 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-cbp5t" event={"ID":"7e2ab408-9ac3-4e29-aed5-42d77f94ad50","Type":"ContainerStarted","Data":"d2c43de3a15501f06a87f0a4435b4c02899639f93e9750cefb8bc950d5fa9e7c"} Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.063622 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-pnbrx" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.067529 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-g29lh" event={"ID":"1001a5ac-b44d-41c2-979f-a987c9c6d63f","Type":"ContainerStarted","Data":"6493b47ccef773bd23246232a471368061540b47da25901bc2090cf07a459b5e"} Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.081062 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dcx62\" (UniqueName: \"kubernetes.io/projected/f64a0f1e-3264-4862-ad3b-097bf139855c-kube-api-access-dcx62\") pod \"olm-operator-6b444d44fb-zzlmb\" (UID: \"f64a0f1e-3264-4862-ad3b-097bf139855c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zzlmb" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.090097 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-8mrgz" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.092153 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-bf8b4"] Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.101901 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2a76c011-1596-4893-b01f-abf8e6e5295c-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-9kh7f\" (UID: \"2a76c011-1596-4893-b01f-abf8e6e5295c\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9kh7f" Dec 11 10:20:05 crc kubenswrapper[4663]: E1211 10:20:05.107671 4663 configmap.go:193] Couldn't get configMap openshift-route-controller-manager/config: failed to sync configmap cache: timed out waiting for the condition Dec 11 10:20:05 crc kubenswrapper[4663]: E1211 10:20:05.107730 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/c7f23b6b-b7a1-46f4-b553-37be88f8fae0-config podName:c7f23b6b-b7a1-46f4-b553-37be88f8fae0 nodeName:}" failed. No retries permitted until 2025-12-11 10:20:06.107714365 +0000 UTC m=+136.925644149 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/c7f23b6b-b7a1-46f4-b553-37be88f8fae0-config") pod "route-controller-manager-6576b87f9c-zvj6k" (UID: "c7f23b6b-b7a1-46f4-b553-37be88f8fae0") : failed to sync configmap cache: timed out waiting for the condition Dec 11 10:20:05 crc kubenswrapper[4663]: E1211 10:20:05.108012 4663 configmap.go:193] Couldn't get configMap openshift-route-controller-manager/client-ca: failed to sync configmap cache: timed out waiting for the condition Dec 11 10:20:05 crc kubenswrapper[4663]: E1211 10:20:05.108045 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/c7f23b6b-b7a1-46f4-b553-37be88f8fae0-client-ca podName:c7f23b6b-b7a1-46f4-b553-37be88f8fae0 nodeName:}" failed. No retries permitted until 2025-12-11 10:20:06.108035953 +0000 UTC m=+136.925965738 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "client-ca" (UniqueName: "kubernetes.io/configmap/c7f23b6b-b7a1-46f4-b553-37be88f8fae0-client-ca") pod "route-controller-manager-6576b87f9c-zvj6k" (UID: "c7f23b6b-b7a1-46f4-b553-37be88f8fae0") : failed to sync configmap cache: timed out waiting for the condition Dec 11 10:20:05 crc kubenswrapper[4663]: E1211 10:20:05.108068 4663 secret.go:188] Couldn't get secret openshift-route-controller-manager/serving-cert: failed to sync secret cache: timed out waiting for the condition Dec 11 10:20:05 crc kubenswrapper[4663]: E1211 10:20:05.108087 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c7f23b6b-b7a1-46f4-b553-37be88f8fae0-serving-cert podName:c7f23b6b-b7a1-46f4-b553-37be88f8fae0 nodeName:}" failed. No retries permitted until 2025-12-11 10:20:06.108082068 +0000 UTC m=+136.926011853 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/c7f23b6b-b7a1-46f4-b553-37be88f8fae0-serving-cert") pod "route-controller-manager-6576b87f9c-zvj6k" (UID: "c7f23b6b-b7a1-46f4-b553-37be88f8fae0") : failed to sync secret cache: timed out waiting for the condition Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.120069 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-nfh76" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.125056 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/58945b3f-8314-4251-a1e1-20dff39bf316-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-kw96j\" (UID: \"58945b3f-8314-4251-a1e1-20dff39bf316\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-kw96j" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.134124 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-vsdq4" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.141220 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6psh6\" (UniqueName: \"kubernetes.io/projected/2bb618f2-adb1-40ff-a336-2520519233b7-kube-api-access-6psh6\") pod \"openshift-apiserver-operator-796bbdcf4f-x8ln7\" (UID: \"2bb618f2-adb1-40ff-a336-2520519233b7\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-x8ln7" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.158978 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9c2sn\" (UniqueName: \"kubernetes.io/projected/905be650-0ed4-45ef-984c-00ab31ee178b-kube-api-access-9c2sn\") pod \"dns-operator-744455d44c-cb2kn\" (UID: \"905be650-0ed4-45ef-984c-00ab31ee178b\") " pod="openshift-dns-operator/dns-operator-744455d44c-cb2kn" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.183561 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5ggzg\" (UniqueName: \"kubernetes.io/projected/507a4649-4e86-435b-8ceb-9587a90bc48c-kube-api-access-5ggzg\") pod \"packageserver-d55dfcdfc-25wmw\" (UID: \"507a4649-4e86-435b-8ceb-9587a90bc48c\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-25wmw" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.188499 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.206385 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.226440 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.226711 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-kw96j" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.232143 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-dc8cp" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.236412 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zwlfp"] Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.238370 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-x8ln7" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.246346 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.246635 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jdpvq\" (UniqueName: \"kubernetes.io/projected/c7f23b6b-b7a1-46f4-b553-37be88f8fae0-kube-api-access-jdpvq\") pod \"route-controller-manager-6576b87f9c-zvj6k\" (UID: \"c7f23b6b-b7a1-46f4-b553-37be88f8fae0\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvj6k" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.266570 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-p968m" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.272935 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9kh7f" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.286077 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.299525 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-n6dwq"] Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.321353 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-24p95" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.342766 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-25wmw" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.342948 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/62062293-e497-4499-b728-226260dc0ae1-registry-certificates\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.343002 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/62062293-e497-4499-b728-226260dc0ae1-trusted-ca\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.343019 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/62062293-e497-4499-b728-226260dc0ae1-bound-sa-token\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.343037 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kzng4\" (UniqueName: \"kubernetes.io/projected/62062293-e497-4499-b728-226260dc0ae1-kube-api-access-kzng4\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.343075 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.343138 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7f7716ca-ea2e-45ea-a861-d592b5c077ac-config\") pod \"kube-controller-manager-operator-78b949d7b-xt8gz\" (UID: \"7f7716ca-ea2e-45ea-a861-d592b5c077ac\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xt8gz" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.343164 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7f7716ca-ea2e-45ea-a861-d592b5c077ac-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-xt8gz\" (UID: \"7f7716ca-ea2e-45ea-a861-d592b5c077ac\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xt8gz" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.343199 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4e035b58-1374-487b-9d9f-e9317793cdc7-config\") pod 
\"openshift-controller-manager-operator-756b6f6bc6-h4kjb\" (UID: \"4e035b58-1374-487b-9d9f-e9317793cdc7\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-h4kjb" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.343237 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4e035b58-1374-487b-9d9f-e9317793cdc7-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-h4kjb\" (UID: \"4e035b58-1374-487b-9d9f-e9317793cdc7\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-h4kjb" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.343257 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/62062293-e497-4499-b728-226260dc0ae1-installation-pull-secrets\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.343278 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rgdz8\" (UniqueName: \"kubernetes.io/projected/4e035b58-1374-487b-9d9f-e9317793cdc7-kube-api-access-rgdz8\") pod \"openshift-controller-manager-operator-756b6f6bc6-h4kjb\" (UID: \"4e035b58-1374-487b-9d9f-e9317793cdc7\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-h4kjb" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.343386 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/62062293-e497-4499-b728-226260dc0ae1-ca-trust-extracted\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.343403 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/62062293-e497-4499-b728-226260dc0ae1-registry-tls\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.343465 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7f7716ca-ea2e-45ea-a861-d592b5c077ac-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-xt8gz\" (UID: \"7f7716ca-ea2e-45ea-a861-d592b5c077ac\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xt8gz" Dec 11 10:20:05 crc kubenswrapper[4663]: E1211 10:20:05.343693 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 10:20:05.843675695 +0000 UTC m=+136.661605479 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mxhbc" (UID: "62062293-e497-4499-b728-226260dc0ae1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.349596 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-fdg9t"] Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.360488 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zzlmb" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.370066 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-cgckf"] Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.379829 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-cb2kn" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.379910 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-jnb74"] Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.382711 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-hvrvb"] Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.405995 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-75lmn"] Dec 11 10:20:05 crc kubenswrapper[4663]: W1211 10:20:05.435389 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod10c2efee_1b70_4258_92f0_caba3a5cfd50.slice/crio-a86a39b09a2be2c112e153549336097a1a291cddd26cf8085ee2d4b0c3f4ccc9 WatchSource:0}: Error finding container a86a39b09a2be2c112e153549336097a1a291cddd26cf8085ee2d4b0c3f4ccc9: Status 404 returned error can't find the container with id a86a39b09a2be2c112e153549336097a1a291cddd26cf8085ee2d4b0c3f4ccc9 Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.444381 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 10:20:05 crc kubenswrapper[4663]: E1211 10:20:05.444617 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 10:20:05.94456539 +0000 UTC m=+136.762495174 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.444990 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a533fee8-aa25-4eae-9eff-134d4b61033f-config-volume\") pod \"dns-default-vm8rp\" (UID: \"a533fee8-aa25-4eae-9eff-134d4b61033f\") " pod="openshift-dns/dns-default-vm8rp" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.445135 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7f7716ca-ea2e-45ea-a861-d592b5c077ac-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-xt8gz\" (UID: \"7f7716ca-ea2e-45ea-a861-d592b5c077ac\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xt8gz" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.445166 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/62062293-e497-4499-b728-226260dc0ae1-registry-certificates\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.445295 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/62062293-e497-4499-b728-226260dc0ae1-trusted-ca\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.445311 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/62062293-e497-4499-b728-226260dc0ae1-bound-sa-token\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.445359 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kzng4\" (UniqueName: \"kubernetes.io/projected/62062293-e497-4499-b728-226260dc0ae1-kube-api-access-kzng4\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.445515 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.445607 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"kube-api-access-mndkw\" (UniqueName: \"kubernetes.io/projected/99f0a410-2f12-45d0-8b95-23552288ee49-kube-api-access-mndkw\") pod \"ingress-canary-bnmt9\" (UID: \"99f0a410-2f12-45d0-8b95-23552288ee49\") " pod="openshift-ingress-canary/ingress-canary-bnmt9" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.445652 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wxknj\" (UniqueName: \"kubernetes.io/projected/47f9ce8e-00cf-4b1f-9a90-816f9eb7338f-kube-api-access-wxknj\") pod \"csi-hostpathplugin-9nsfq\" (UID: \"47f9ce8e-00cf-4b1f-9a90-816f9eb7338f\") " pod="hostpath-provisioner/csi-hostpathplugin-9nsfq" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.445768 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/47f9ce8e-00cf-4b1f-9a90-816f9eb7338f-mountpoint-dir\") pod \"csi-hostpathplugin-9nsfq\" (UID: \"47f9ce8e-00cf-4b1f-9a90-816f9eb7338f\") " pod="hostpath-provisioner/csi-hostpathplugin-9nsfq" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.445782 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/47f9ce8e-00cf-4b1f-9a90-816f9eb7338f-plugins-dir\") pod \"csi-hostpathplugin-9nsfq\" (UID: \"47f9ce8e-00cf-4b1f-9a90-816f9eb7338f\") " pod="hostpath-provisioner/csi-hostpathplugin-9nsfq" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.445830 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/47f9ce8e-00cf-4b1f-9a90-816f9eb7338f-registration-dir\") pod \"csi-hostpathplugin-9nsfq\" (UID: \"47f9ce8e-00cf-4b1f-9a90-816f9eb7338f\") " pod="hostpath-provisioner/csi-hostpathplugin-9nsfq" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.445848 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7f7716ca-ea2e-45ea-a861-d592b5c077ac-config\") pod \"kube-controller-manager-operator-78b949d7b-xt8gz\" (UID: \"7f7716ca-ea2e-45ea-a861-d592b5c077ac\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xt8gz" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.445891 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a533fee8-aa25-4eae-9eff-134d4b61033f-metrics-tls\") pod \"dns-default-vm8rp\" (UID: \"a533fee8-aa25-4eae-9eff-134d4b61033f\") " pod="openshift-dns/dns-default-vm8rp" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.445927 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/99f0a410-2f12-45d0-8b95-23552288ee49-cert\") pod \"ingress-canary-bnmt9\" (UID: \"99f0a410-2f12-45d0-8b95-23552288ee49\") " pod="openshift-ingress-canary/ingress-canary-bnmt9" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.445977 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7f7716ca-ea2e-45ea-a861-d592b5c077ac-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-xt8gz\" (UID: \"7f7716ca-ea2e-45ea-a861-d592b5c077ac\") " 
pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xt8gz" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.446010 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4e035b58-1374-487b-9d9f-e9317793cdc7-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-h4kjb\" (UID: \"4e035b58-1374-487b-9d9f-e9317793cdc7\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-h4kjb" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.446026 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4e035b58-1374-487b-9d9f-e9317793cdc7-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-h4kjb\" (UID: \"4e035b58-1374-487b-9d9f-e9317793cdc7\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-h4kjb" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.446042 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lz92z\" (UniqueName: \"kubernetes.io/projected/a533fee8-aa25-4eae-9eff-134d4b61033f-kube-api-access-lz92z\") pod \"dns-default-vm8rp\" (UID: \"a533fee8-aa25-4eae-9eff-134d4b61033f\") " pod="openshift-dns/dns-default-vm8rp" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.446087 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/62062293-e497-4499-b728-226260dc0ae1-installation-pull-secrets\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.446152 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rgdz8\" (UniqueName: \"kubernetes.io/projected/4e035b58-1374-487b-9d9f-e9317793cdc7-kube-api-access-rgdz8\") pod \"openshift-controller-manager-operator-756b6f6bc6-h4kjb\" (UID: \"4e035b58-1374-487b-9d9f-e9317793cdc7\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-h4kjb" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.446255 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/47f9ce8e-00cf-4b1f-9a90-816f9eb7338f-csi-data-dir\") pod \"csi-hostpathplugin-9nsfq\" (UID: \"47f9ce8e-00cf-4b1f-9a90-816f9eb7338f\") " pod="hostpath-provisioner/csi-hostpathplugin-9nsfq" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.446298 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/62062293-e497-4499-b728-226260dc0ae1-ca-trust-extracted\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.446351 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/62062293-e497-4499-b728-226260dc0ae1-registry-tls\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.446372 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/47f9ce8e-00cf-4b1f-9a90-816f9eb7338f-socket-dir\") pod \"csi-hostpathplugin-9nsfq\" (UID: \"47f9ce8e-00cf-4b1f-9a90-816f9eb7338f\") " pod="hostpath-provisioner/csi-hostpathplugin-9nsfq" Dec 11 10:20:05 crc kubenswrapper[4663]: W1211 10:20:05.447274 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode3cf153e_0d08_489d_8d35_1170356831ee.slice/crio-a30ce2113340a47878f5fb01066ef5ce7bde4cfb98607ff139209fe1c3a8b5da WatchSource:0}: Error finding container a30ce2113340a47878f5fb01066ef5ce7bde4cfb98607ff139209fe1c3a8b5da: Status 404 returned error can't find the container with id a30ce2113340a47878f5fb01066ef5ce7bde4cfb98607ff139209fe1c3a8b5da Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.447669 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/62062293-e497-4499-b728-226260dc0ae1-ca-trust-extracted\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.449831 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7f7716ca-ea2e-45ea-a861-d592b5c077ac-config\") pod \"kube-controller-manager-operator-78b949d7b-xt8gz\" (UID: \"7f7716ca-ea2e-45ea-a861-d592b5c077ac\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xt8gz" Dec 11 10:20:05 crc kubenswrapper[4663]: E1211 10:20:05.450545 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 10:20:05.950527978 +0000 UTC m=+136.768457762 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mxhbc" (UID: "62062293-e497-4499-b728-226260dc0ae1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.452286 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/62062293-e497-4499-b728-226260dc0ae1-trusted-ca\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.453675 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4e035b58-1374-487b-9d9f-e9317793cdc7-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-h4kjb\" (UID: \"4e035b58-1374-487b-9d9f-e9317793cdc7\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-h4kjb" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.453600 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/62062293-e497-4499-b728-226260dc0ae1-registry-certificates\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.457535 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4e035b58-1374-487b-9d9f-e9317793cdc7-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-h4kjb\" (UID: \"4e035b58-1374-487b-9d9f-e9317793cdc7\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-h4kjb" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.458986 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/62062293-e497-4499-b728-226260dc0ae1-registry-tls\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.459417 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7f7716ca-ea2e-45ea-a861-d592b5c077ac-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-xt8gz\" (UID: \"7f7716ca-ea2e-45ea-a861-d592b5c077ac\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xt8gz" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.460808 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/62062293-e497-4499-b728-226260dc0ae1-installation-pull-secrets\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.503163 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-console/downloads-7954f5f757-mn2pm"] Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.527354 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7f7716ca-ea2e-45ea-a861-d592b5c077ac-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-xt8gz\" (UID: \"7f7716ca-ea2e-45ea-a861-d592b5c077ac\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xt8gz" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.527709 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424135-v2t6r"] Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.527737 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zxbz5"] Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.530483 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kzng4\" (UniqueName: \"kubernetes.io/projected/62062293-e497-4499-b728-226260dc0ae1-kube-api-access-kzng4\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.540400 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/62062293-e497-4499-b728-226260dc0ae1-bound-sa-token\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.549577 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.549788 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mndkw\" (UniqueName: \"kubernetes.io/projected/99f0a410-2f12-45d0-8b95-23552288ee49-kube-api-access-mndkw\") pod \"ingress-canary-bnmt9\" (UID: \"99f0a410-2f12-45d0-8b95-23552288ee49\") " pod="openshift-ingress-canary/ingress-canary-bnmt9" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.549817 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wxknj\" (UniqueName: \"kubernetes.io/projected/47f9ce8e-00cf-4b1f-9a90-816f9eb7338f-kube-api-access-wxknj\") pod \"csi-hostpathplugin-9nsfq\" (UID: \"47f9ce8e-00cf-4b1f-9a90-816f9eb7338f\") " pod="hostpath-provisioner/csi-hostpathplugin-9nsfq" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.549845 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/47f9ce8e-00cf-4b1f-9a90-816f9eb7338f-mountpoint-dir\") pod \"csi-hostpathplugin-9nsfq\" (UID: \"47f9ce8e-00cf-4b1f-9a90-816f9eb7338f\") " pod="hostpath-provisioner/csi-hostpathplugin-9nsfq" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.549860 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: 
\"kubernetes.io/host-path/47f9ce8e-00cf-4b1f-9a90-816f9eb7338f-plugins-dir\") pod \"csi-hostpathplugin-9nsfq\" (UID: \"47f9ce8e-00cf-4b1f-9a90-816f9eb7338f\") " pod="hostpath-provisioner/csi-hostpathplugin-9nsfq" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.549878 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/47f9ce8e-00cf-4b1f-9a90-816f9eb7338f-registration-dir\") pod \"csi-hostpathplugin-9nsfq\" (UID: \"47f9ce8e-00cf-4b1f-9a90-816f9eb7338f\") " pod="hostpath-provisioner/csi-hostpathplugin-9nsfq" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.549896 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a533fee8-aa25-4eae-9eff-134d4b61033f-metrics-tls\") pod \"dns-default-vm8rp\" (UID: \"a533fee8-aa25-4eae-9eff-134d4b61033f\") " pod="openshift-dns/dns-default-vm8rp" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.549913 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/99f0a410-2f12-45d0-8b95-23552288ee49-cert\") pod \"ingress-canary-bnmt9\" (UID: \"99f0a410-2f12-45d0-8b95-23552288ee49\") " pod="openshift-ingress-canary/ingress-canary-bnmt9" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.549932 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lz92z\" (UniqueName: \"kubernetes.io/projected/a533fee8-aa25-4eae-9eff-134d4b61033f-kube-api-access-lz92z\") pod \"dns-default-vm8rp\" (UID: \"a533fee8-aa25-4eae-9eff-134d4b61033f\") " pod="openshift-dns/dns-default-vm8rp" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.549967 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/47f9ce8e-00cf-4b1f-9a90-816f9eb7338f-csi-data-dir\") pod \"csi-hostpathplugin-9nsfq\" (UID: \"47f9ce8e-00cf-4b1f-9a90-816f9eb7338f\") " pod="hostpath-provisioner/csi-hostpathplugin-9nsfq" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.549989 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/47f9ce8e-00cf-4b1f-9a90-816f9eb7338f-socket-dir\") pod \"csi-hostpathplugin-9nsfq\" (UID: \"47f9ce8e-00cf-4b1f-9a90-816f9eb7338f\") " pod="hostpath-provisioner/csi-hostpathplugin-9nsfq" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.550029 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a533fee8-aa25-4eae-9eff-134d4b61033f-config-volume\") pod \"dns-default-vm8rp\" (UID: \"a533fee8-aa25-4eae-9eff-134d4b61033f\") " pod="openshift-dns/dns-default-vm8rp" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.550246 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/47f9ce8e-00cf-4b1f-9a90-816f9eb7338f-registration-dir\") pod \"csi-hostpathplugin-9nsfq\" (UID: \"47f9ce8e-00cf-4b1f-9a90-816f9eb7338f\") " pod="hostpath-provisioner/csi-hostpathplugin-9nsfq" Dec 11 10:20:05 crc kubenswrapper[4663]: E1211 10:20:05.550361 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-11 10:20:06.050341635 +0000 UTC m=+136.868271420 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.550543 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/47f9ce8e-00cf-4b1f-9a90-816f9eb7338f-mountpoint-dir\") pod \"csi-hostpathplugin-9nsfq\" (UID: \"47f9ce8e-00cf-4b1f-9a90-816f9eb7338f\") " pod="hostpath-provisioner/csi-hostpathplugin-9nsfq" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.550592 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/47f9ce8e-00cf-4b1f-9a90-816f9eb7338f-plugins-dir\") pod \"csi-hostpathplugin-9nsfq\" (UID: \"47f9ce8e-00cf-4b1f-9a90-816f9eb7338f\") " pod="hostpath-provisioner/csi-hostpathplugin-9nsfq" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.550604 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a533fee8-aa25-4eae-9eff-134d4b61033f-config-volume\") pod \"dns-default-vm8rp\" (UID: \"a533fee8-aa25-4eae-9eff-134d4b61033f\") " pod="openshift-dns/dns-default-vm8rp" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.550695 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/47f9ce8e-00cf-4b1f-9a90-816f9eb7338f-csi-data-dir\") pod \"csi-hostpathplugin-9nsfq\" (UID: \"47f9ce8e-00cf-4b1f-9a90-816f9eb7338f\") " pod="hostpath-provisioner/csi-hostpathplugin-9nsfq" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.550746 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/47f9ce8e-00cf-4b1f-9a90-816f9eb7338f-socket-dir\") pod \"csi-hostpathplugin-9nsfq\" (UID: \"47f9ce8e-00cf-4b1f-9a90-816f9eb7338f\") " pod="hostpath-provisioner/csi-hostpathplugin-9nsfq" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.553220 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a533fee8-aa25-4eae-9eff-134d4b61033f-metrics-tls\") pod \"dns-default-vm8rp\" (UID: \"a533fee8-aa25-4eae-9eff-134d4b61033f\") " pod="openshift-dns/dns-default-vm8rp" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.554736 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/99f0a410-2f12-45d0-8b95-23552288ee49-cert\") pod \"ingress-canary-bnmt9\" (UID: \"99f0a410-2f12-45d0-8b95-23552288ee49\") " pod="openshift-ingress-canary/ingress-canary-bnmt9" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.573938 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rgdz8\" (UniqueName: \"kubernetes.io/projected/4e035b58-1374-487b-9d9f-e9317793cdc7-kube-api-access-rgdz8\") pod \"openshift-controller-manager-operator-756b6f6bc6-h4kjb\" (UID: \"4e035b58-1374-487b-9d9f-e9317793cdc7\") " 
pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-h4kjb" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.603967 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mndkw\" (UniqueName: \"kubernetes.io/projected/99f0a410-2f12-45d0-8b95-23552288ee49-kube-api-access-mndkw\") pod \"ingress-canary-bnmt9\" (UID: \"99f0a410-2f12-45d0-8b95-23552288ee49\") " pod="openshift-ingress-canary/ingress-canary-bnmt9" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.613552 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xt8gz" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.653291 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" Dec 11 10:20:05 crc kubenswrapper[4663]: E1211 10:20:05.655790 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 10:20:06.155771923 +0000 UTC m=+136.973701707 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mxhbc" (UID: "62062293-e497-4499-b728-226260dc0ae1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.662062 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wxknj\" (UniqueName: \"kubernetes.io/projected/47f9ce8e-00cf-4b1f-9a90-816f9eb7338f-kube-api-access-wxknj\") pod \"csi-hostpathplugin-9nsfq\" (UID: \"47f9ce8e-00cf-4b1f-9a90-816f9eb7338f\") " pod="hostpath-provisioner/csi-hostpathplugin-9nsfq" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.673797 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-8mrgz"] Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.682547 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lz92z\" (UniqueName: \"kubernetes.io/projected/a533fee8-aa25-4eae-9eff-134d4b61033f-kube-api-access-lz92z\") pod \"dns-default-vm8rp\" (UID: \"a533fee8-aa25-4eae-9eff-134d4b61033f\") " pod="openshift-dns/dns-default-vm8rp" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.687050 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-kw96j"] Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.714073 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-pnbrx"] Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.729369 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-rw7qg"] 
Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.755116 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 10:20:05 crc kubenswrapper[4663]: E1211 10:20:05.756147 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 10:20:06.256130589 +0000 UTC m=+137.074060373 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.765497 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-9nsfq" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.775101 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-vm8rp" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.777517 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-bnmt9" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.800170 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fwjxr"] Dec 11 10:20:05 crc kubenswrapper[4663]: W1211 10:20:05.803712 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod58945b3f_8314_4251_a1e1_20dff39bf316.slice/crio-bce7e71118987eb1518a75f6a3327f61fb612f43b3764335893e3e43446bbbfa WatchSource:0}: Error finding container bce7e71118987eb1518a75f6a3327f61fb612f43b3764335893e3e43446bbbfa: Status 404 returned error can't find the container with id bce7e71118987eb1518a75f6a3327f61fb612f43b3764335893e3e43446bbbfa Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.808118 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-vsdq4"] Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.858697 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" Dec 11 10:20:05 crc kubenswrapper[4663]: E1211 10:20:05.859039 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 10:20:06.359028102 +0000 UTC m=+137.176957885 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mxhbc" (UID: "62062293-e497-4499-b728-226260dc0ae1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.861028 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-h4kjb" Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.865225 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9kh7f"] Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.876440 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-x8ln7"] Dec 11 10:20:05 crc kubenswrapper[4663]: W1211 10:20:05.900967 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod32a7449e_b310_406a_b03a_4e2e461d11ae.slice/crio-6217df8b65facc35743a60dcd979d596e9c151afc7037ea914203890129b16d1 WatchSource:0}: Error finding container 6217df8b65facc35743a60dcd979d596e9c151afc7037ea914203890129b16d1: Status 404 returned error can't find the container with id 6217df8b65facc35743a60dcd979d596e9c151afc7037ea914203890129b16d1 Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.957044 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-dc8cp"] Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.959889 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 10:20:05 crc kubenswrapper[4663]: E1211 10:20:05.960049 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 10:20:06.460027963 +0000 UTC m=+137.277957747 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 10:20:05 crc kubenswrapper[4663]: I1211 10:20:05.960237 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" Dec 11 10:20:05 crc kubenswrapper[4663]: E1211 10:20:05.960538 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 10:20:06.460525322 +0000 UTC m=+137.278455106 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mxhbc" (UID: "62062293-e497-4499-b728-226260dc0ae1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.060730 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 10:20:06 crc kubenswrapper[4663]: E1211 10:20:06.061263 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 10:20:06.561250119 +0000 UTC m=+137.379179904 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.077755 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-24p95"] Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.146674 4663 generic.go:334] "Generic (PLEG): container finished" podID="26fa94f3-6f45-4641-a0b5-b027b7db54c3" containerID="882a4fcf38219b65f619a09b13bcf00e313fc208bf3160049074ee3083076748" exitCode=0 Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.146810 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-9cvs5" event={"ID":"26fa94f3-6f45-4641-a0b5-b027b7db54c3","Type":"ContainerDied","Data":"882a4fcf38219b65f619a09b13bcf00e313fc208bf3160049074ee3083076748"} Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.147525 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-p968m"] Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.159671 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fdg9t" event={"ID":"1fc12aa7-09ac-4720-b32e-920301ca04ee","Type":"ContainerStarted","Data":"c3bcdece6ebdee8998bb5249ad780e9d90f3c9b0cc55ce8af832ea9b0ce696cf"} Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.159938 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fdg9t" event={"ID":"1fc12aa7-09ac-4720-b32e-920301ca04ee","Type":"ContainerStarted","Data":"c4777f88c411ff91c5e85feb1618552aeace31c61081e8626c1c8e924f15789d"} Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.164840 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c7f23b6b-b7a1-46f4-b553-37be88f8fae0-client-ca\") pod \"route-controller-manager-6576b87f9c-zvj6k\" (UID: \"c7f23b6b-b7a1-46f4-b553-37be88f8fae0\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvj6k" Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.164913 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c7f23b6b-b7a1-46f4-b553-37be88f8fae0-config\") pod \"route-controller-manager-6576b87f9c-zvj6k\" (UID: \"c7f23b6b-b7a1-46f4-b553-37be88f8fae0\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvj6k" Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.164948 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c7f23b6b-b7a1-46f4-b553-37be88f8fae0-serving-cert\") pod \"route-controller-manager-6576b87f9c-zvj6k\" (UID: \"c7f23b6b-b7a1-46f4-b553-37be88f8fae0\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvj6k" Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.164986 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" Dec 11 10:20:06 crc kubenswrapper[4663]: E1211 10:20:06.165288 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 10:20:06.665274823 +0000 UTC m=+137.483204608 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mxhbc" (UID: "62062293-e497-4499-b728-226260dc0ae1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.165938 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c7f23b6b-b7a1-46f4-b553-37be88f8fae0-client-ca\") pod \"route-controller-manager-6576b87f9c-zvj6k\" (UID: \"c7f23b6b-b7a1-46f4-b553-37be88f8fae0\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvj6k" Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.167722 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c7f23b6b-b7a1-46f4-b553-37be88f8fae0-config\") pod \"route-controller-manager-6576b87f9c-zvj6k\" (UID: \"c7f23b6b-b7a1-46f4-b553-37be88f8fae0\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvj6k" Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.174230 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c7f23b6b-b7a1-46f4-b553-37be88f8fae0-serving-cert\") pod \"route-controller-manager-6576b87f9c-zvj6k\" (UID: \"c7f23b6b-b7a1-46f4-b553-37be88f8fae0\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvj6k" Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.179614 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xt8gz"] Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.190221 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-nn574" event={"ID":"fc42c87c-f305-40f2-8d38-c9ef45c05bd0","Type":"ContainerStarted","Data":"440a8fbdd41e1d83c2b1a7c4818f371a77f776db30340461212d1936d30e4af5"} Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.204153 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-mn2pm" event={"ID":"46f7ebf5-734d-41f6-a8eb-1daf1dc0e014","Type":"ContainerStarted","Data":"c8a2bb665761457c59ab98783856ee61995cb67f65cce0d0b36a692e44e3cd08"} Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.204217 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-mn2pm" 
event={"ID":"46f7ebf5-734d-41f6-a8eb-1daf1dc0e014","Type":"ContainerStarted","Data":"65902e39509bb5bbc2bc366bb65faa3e3d421c5d85391bd3d2cfce8d551ce150"} Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.204621 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-mn2pm" Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.205610 4663 patch_prober.go:28] interesting pod/downloads-7954f5f757-mn2pm container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.21:8080/\": dial tcp 10.217.0.21:8080: connect: connection refused" start-of-body= Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.205654 4663 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-mn2pm" podUID="46f7ebf5-734d-41f6-a8eb-1daf1dc0e014" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.21:8080/\": dial tcp 10.217.0.21:8080: connect: connection refused" Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.206547 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-pnbrx" event={"ID":"b16defcf-1335-4210-a647-bcaef5e0642f","Type":"ContainerStarted","Data":"77d4cc787787222113b8bc2acb10355abc3bece669ce7ea07f3045951357f18c"} Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.207916 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-hvrvb" event={"ID":"e3cf153e-0d08-489d-8d35-1170356831ee","Type":"ContainerStarted","Data":"a30ce2113340a47878f5fb01066ef5ce7bde4cfb98607ff139209fe1c3a8b5da"} Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.217305 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-75lmn" event={"ID":"83732716-a7c1-4be4-a73a-1d7a9633af29","Type":"ContainerStarted","Data":"6fe5df49d39e927af8f955ee020ea81aec54e7fb79f89f4652dc56a826beef3a"} Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.223783 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-25wmw"] Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.233981 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-jnb74" event={"ID":"10c2efee-1b70-4258-92f0-caba3a5cfd50","Type":"ContainerStarted","Data":"f750b2304f9a6db0a19f3235279f723ae0647045e563b1438059bc9f2220da08"} Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.234046 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-jnb74" event={"ID":"10c2efee-1b70-4258-92f0-caba3a5cfd50","Type":"ContainerStarted","Data":"a86a39b09a2be2c112e153549336097a1a291cddd26cf8085ee2d4b0c3f4ccc9"} Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.238706 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zzlmb"] Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.238739 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-vsdq4" event={"ID":"172795ed-fdc1-4567-ae54-f6980f7d2038","Type":"ContainerStarted","Data":"3a8e3e5f6ca0dafd974f4e2c959f0d374061102d654cd6564e17d90b43cc82d5"} Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 
10:20:06.240401 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-cb2kn"] Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.263918 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-vm8rp"] Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.267888 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 10:20:06 crc kubenswrapper[4663]: E1211 10:20:06.269196 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 10:20:06.769160533 +0000 UTC m=+137.587090318 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.299258 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-26pzr" event={"ID":"b278aa37-9fc1-430d-9182-2dbf0f56b930","Type":"ContainerStarted","Data":"5f2e15ef3bc48bfff02a43785957a5ff8291f2fe60debd74eaab5951a5a35571"} Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.303658 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9kh7f" event={"ID":"2a76c011-1596-4893-b01f-abf8e6e5295c","Type":"ContainerStarted","Data":"f40c7caad1d59535edf1e54952a1ab39a44d0a32799a20c5fa3a8bd0cdb8fb4c"} Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.312409 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424135-v2t6r" event={"ID":"b15591c3-541e-4daf-8e21-9b69db5cf260","Type":"ContainerStarted","Data":"eaa9c4f3b063825b6b6c5bc0975726bc9bbf7677c5558f74f59304063587137f"} Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.313053 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvj6k" Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.317436 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zxbz5" event={"ID":"9813fa82-894c-4380-8975-24771adf79e7","Type":"ContainerStarted","Data":"3b5b61683fa3d3d54468ea9dfdb92752ba930d02665567254ac8733beb6eabce"} Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.319222 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-nfh76" event={"ID":"ffff689f-0318-485f-9f8c-a8f2f86eebaf","Type":"ContainerStarted","Data":"c32ee9dc141c936a44342f7ba865da0918894bd404c8cebe18e8995192e33f6b"} Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.319248 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-nfh76" event={"ID":"ffff689f-0318-485f-9f8c-a8f2f86eebaf","Type":"ContainerStarted","Data":"4125d7727477ae9b62b7d8f3ce971fc8e554c1f42a22a2f27b0f361cc7254697"} Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.334780 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mzhff" event={"ID":"14c24a3b-a966-47ff-a122-50abb423e4b3","Type":"ContainerStarted","Data":"c659993122563c3203540c7cdc1e204c3073b736c2c77692eb7678acca6a44e2"} Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.334831 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mzhff" event={"ID":"14c24a3b-a966-47ff-a122-50abb423e4b3","Type":"ContainerStarted","Data":"68ded4d19b8356b8935255c8c8f861da4086cd63db48b8fa68b6adbe46096a86"} Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.337417 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-n6dwq" event={"ID":"14f276e8-14d3-4e67-956d-ad6212f45c69","Type":"ContainerStarted","Data":"4ddcddc85dfecf9912c23f042a2d1e6d6f92b3e656be1bc0d9d90b8e72fcf57a"} Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.337725 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-n6dwq" Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.341533 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zwlfp" event={"ID":"3885a18d-c5e3-484e-8950-878e57b1bb43","Type":"ContainerStarted","Data":"82b84c60dac1c217111e58591190544bf10652c6be898f6d70ec6b2189cbf1ab"} Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.341712 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zwlfp" event={"ID":"3885a18d-c5e3-484e-8950-878e57b1bb43","Type":"ContainerStarted","Data":"10e60e93fbc0cbee1151b2e388d19ddfe4c6e4e6290b7dd02bb3c6f5f8f75bb8"} Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.344586 4663 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-n6dwq container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body= Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 
10:20:06.344838 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fwjxr" event={"ID":"32a7449e-b310-406a-b03a-4e2e461d11ae","Type":"ContainerStarted","Data":"6217df8b65facc35743a60dcd979d596e9c151afc7037ea914203890129b16d1"} Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.345227 4663 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-n6dwq" podUID="14f276e8-14d3-4e67-956d-ad6212f45c69" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.348265 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-dc8cp" event={"ID":"d18be0a7-2aaa-4aaa-9d37-f44453e994d8","Type":"ContainerStarted","Data":"ad0b211d0cb87c19996d48bd7b621ef4d708b5586d535220e706e4c893409a10"} Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.359807 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-g29lh" event={"ID":"1001a5ac-b44d-41c2-979f-a987c9c6d63f","Type":"ContainerStarted","Data":"5992929c3dcb7ceeb428ed8c30ce47f7c14e193aac648309649123f6e46a96d9"} Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.359864 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-g29lh" event={"ID":"1001a5ac-b44d-41c2-979f-a987c9c6d63f","Type":"ContainerStarted","Data":"32140eb990fe744eebb99fe56bacacfee2ef6141530e8e650d861f192b5b7970"} Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.361783 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-x8ln7" event={"ID":"2bb618f2-adb1-40ff-a336-2520519233b7","Type":"ContainerStarted","Data":"2eacdd730aafa1c3a41770598e83a54854253bb1da847c3a271c9ac6e5f5ca3e"} Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.363379 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-kw96j" event={"ID":"58945b3f-8314-4251-a1e1-20dff39bf316","Type":"ContainerStarted","Data":"bce7e71118987eb1518a75f6a3327f61fb612f43b3764335893e3e43446bbbfa"} Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.365331 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-n9pwg" event={"ID":"3a676c5e-7428-4f37-9aa2-0755611c6ff3","Type":"ContainerStarted","Data":"e18538bfa0ee0e8a26bc31d9f118d20d5781d8233bb3167b6bed00c626cceac3"} Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.365371 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-n9pwg" event={"ID":"3a676c5e-7428-4f37-9aa2-0755611c6ff3","Type":"ContainerStarted","Data":"3ff0b4d126efe58fdc514da4f5434b6ebecc895d3c38ebba0312bc4dca927ba2"} Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.370576 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" Dec 11 10:20:06 crc kubenswrapper[4663]: E1211 
10:20:06.371317 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 10:20:06.871305131 +0000 UTC m=+137.689234915 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mxhbc" (UID: "62062293-e497-4499-b728-226260dc0ae1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.373433 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-cgckf" event={"ID":"a4cc9117-6286-44ba-a590-893cff694a7c","Type":"ContainerStarted","Data":"03f968a22326f30ba6ce154bc405c4ab4d6daeefb8d9bf488dbd5677609e702d"} Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.375173 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-bf8b4" event={"ID":"d87db89f-40fd-4e1c-a959-e167edef0c19","Type":"ContainerStarted","Data":"e58792b1eaa7b397781bfbd520243898a863c0bd36110f4f1ee196dbc61f7e56"} Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.375233 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-bf8b4" event={"ID":"d87db89f-40fd-4e1c-a959-e167edef0c19","Type":"ContainerStarted","Data":"56c160e7b8246a35f120f62fc262b9a26e8485b99529e34766c0269ff7ff23d9"} Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.378753 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-rw7qg" event={"ID":"44909f31-73c1-4282-a2f6-e93bc1bf6d32","Type":"ContainerStarted","Data":"acc16f9a043188737c0bb92d7970a88074c3c12537119de03899f90344e3f5ec"} Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.385030 4663 generic.go:334] "Generic (PLEG): container finished" podID="2c2ceeb7-b3cc-4085-8591-0e0c42a01c34" containerID="736917829f67ea2b53aaa4b3db11372e5ab1b982a0c118f07e51956993f6b918" exitCode=0 Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.385093 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hfmnw" event={"ID":"2c2ceeb7-b3cc-4085-8591-0e0c42a01c34","Type":"ContainerDied","Data":"736917829f67ea2b53aaa4b3db11372e5ab1b982a0c118f07e51956993f6b918"} Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.387956 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-8mrgz" event={"ID":"a35ea846-6e87-40fe-8998-037fb7bd2060","Type":"ContainerStarted","Data":"77a80f0bfb16b06c98a3914713ddc8490951e399e0e58f010f4c43cd63e853ea"} Dec 11 10:20:06 crc kubenswrapper[4663]: W1211 10:20:06.396491 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda533fee8_aa25_4eae_9eff_134d4b61033f.slice/crio-f4eab02c589d072ed1f5e27d23277085656a4c287ea3656bfce36f266f1787fd WatchSource:0}: Error finding container f4eab02c589d072ed1f5e27d23277085656a4c287ea3656bfce36f266f1787fd: Status 404 returned error can't find the container with id f4eab02c589d072ed1f5e27d23277085656a4c287ea3656bfce36f266f1787fd 
Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.400383 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-xhhrv"
Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.471322 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 11 10:20:06 crc kubenswrapper[4663]: E1211 10:20:06.474074 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 10:20:06.974049711 +0000 UTC m=+137.791979494 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.567088 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-9nsfq"]
Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.574058 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc"
Dec 11 10:20:06 crc kubenswrapper[4663]: E1211 10:20:06.574498 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 10:20:07.074486907 +0000 UTC m=+137.892416691 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mxhbc" (UID: "62062293-e497-4499-b728-226260dc0ae1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.628630 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-cbp5t" podStartSLOduration=120.628614963 podStartE2EDuration="2m0.628614963s" podCreationTimestamp="2025-12-11 10:18:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:20:06.628149779 +0000 UTC m=+137.446079563" watchObservedRunningTime="2025-12-11 10:20:06.628614963 +0000 UTC m=+137.446544747"
Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.658145 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-bf8b4" podStartSLOduration=119.65812909 podStartE2EDuration="1m59.65812909s" podCreationTimestamp="2025-12-11 10:18:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:20:06.65677769 +0000 UTC m=+137.474707473" watchObservedRunningTime="2025-12-11 10:20:06.65812909 +0000 UTC m=+137.476058874"
Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.675437 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 11 10:20:06 crc kubenswrapper[4663]: E1211 10:20:06.675805 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 10:20:07.175788718 +0000 UTC m=+137.993718501 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.721157 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-bnmt9"]
Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.724258 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-h4kjb"]
Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.741682 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29424135-v2t6r" podStartSLOduration=119.741669546 podStartE2EDuration="1m59.741669546s" podCreationTimestamp="2025-12-11 10:18:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:20:06.74025255 +0000 UTC m=+137.558182334" watchObservedRunningTime="2025-12-11 10:20:06.741669546 +0000 UTC m=+137.559599330"
Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.776929 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc"
Dec 11 10:20:06 crc kubenswrapper[4663]: E1211 10:20:06.777414 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 10:20:07.277399982 +0000 UTC m=+138.095329766 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mxhbc" (UID: "62062293-e497-4499-b728-226260dc0ae1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 10:20:06 crc kubenswrapper[4663]: W1211 10:20:06.818937 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod99f0a410_2f12_45d0_8b95_23552288ee49.slice/crio-92db6851cef5c66cfb95fb084f77413f936f5bc94dd933ec50531060b3a7ed97 WatchSource:0}: Error finding container 92db6851cef5c66cfb95fb084f77413f936f5bc94dd933ec50531060b3a7ed97: Status 404 returned error can't find the container with id 92db6851cef5c66cfb95fb084f77413f936f5bc94dd933ec50531060b3a7ed97
Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.822435 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-xhhrv" podStartSLOduration=120.822417795 podStartE2EDuration="2m0.822417795s" podCreationTimestamp="2025-12-11 10:18:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:20:06.820003924 +0000 UTC m=+137.637933708" watchObservedRunningTime="2025-12-11 10:20:06.822417795 +0000 UTC m=+137.640347579"
Dec 11 10:20:06 crc kubenswrapper[4663]: W1211 10:20:06.827565 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4e035b58_1374_487b_9d9f_e9317793cdc7.slice/crio-f000547963f36c49acf0b7f9c1092f46877bc0046a5a4fa050b16c766a425784 WatchSource:0}: Error finding container f000547963f36c49acf0b7f9c1092f46877bc0046a5a4fa050b16c766a425784: Status 404 returned error can't find the container with id f000547963f36c49acf0b7f9c1092f46877bc0046a5a4fa050b16c766a425784
Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.882216 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 11 10:20:06 crc kubenswrapper[4663]: E1211 10:20:06.882361 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 10:20:07.382337819 +0000 UTC m=+138.200267603 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.883137 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc"
Dec 11 10:20:06 crc kubenswrapper[4663]: E1211 10:20:06.883811 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 10:20:07.383799907 +0000 UTC m=+138.201729691 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mxhbc" (UID: "62062293-e497-4499-b728-226260dc0ae1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.920122 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvj6k"]
Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.976426 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-n9pwg"
Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.985214 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 11 10:20:06 crc kubenswrapper[4663]: E1211 10:20:06.985587 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 10:20:07.485572273 +0000 UTC m=+138.303502057 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 10:20:06 crc kubenswrapper[4663]: I1211 10:20:06.986394 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zwlfp" podStartSLOduration=119.986376461 podStartE2EDuration="1m59.986376461s" podCreationTimestamp="2025-12-11 10:18:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:20:06.938595042 +0000 UTC m=+137.756524826" watchObservedRunningTime="2025-12-11 10:20:06.986376461 +0000 UTC m=+137.804306246"
Dec 11 10:20:07 crc kubenswrapper[4663]: I1211 10:20:07.002723 4663 patch_prober.go:28] interesting pod/router-default-5444994796-n9pwg container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 11 10:20:07 crc kubenswrapper[4663]: [-]has-synced failed: reason withheld
Dec 11 10:20:07 crc kubenswrapper[4663]: [+]process-running ok
Dec 11 10:20:07 crc kubenswrapper[4663]: healthz check failed
Dec 11 10:20:07 crc kubenswrapper[4663]: I1211 10:20:07.003509 4663 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-n9pwg" podUID="3a676c5e-7428-4f37-9aa2-0755611c6ff3" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 11 10:20:07 crc kubenswrapper[4663]: I1211 10:20:07.020980 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-n6dwq" podStartSLOduration=120.020969204 podStartE2EDuration="2m0.020969204s" podCreationTimestamp="2025-12-11 10:18:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:20:07.020462295 +0000 UTC m=+137.838392099" watchObservedRunningTime="2025-12-11 10:20:07.020969204 +0000 UTC m=+137.838898988"
Dec 11 10:20:07 crc kubenswrapper[4663]: I1211 10:20:07.059362 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mzhff" podStartSLOduration=121.059351028 podStartE2EDuration="2m1.059351028s" podCreationTimestamp="2025-12-11 10:18:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:20:07.058855512 +0000 UTC m=+137.876785296" watchObservedRunningTime="2025-12-11 10:20:07.059351028 +0000 UTC m=+137.877280812"
Dec 11 10:20:07 crc kubenswrapper[4663]: I1211 10:20:07.088903 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc"
Dec 11 10:20:07 crc kubenswrapper[4663]: E1211 10:20:07.089265 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 10:20:07.589251918 +0000 UTC m=+138.407181693 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mxhbc" (UID: "62062293-e497-4499-b728-226260dc0ae1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 10:20:07 crc kubenswrapper[4663]: I1211 10:20:07.201701 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 11 10:20:07 crc kubenswrapper[4663]: E1211 10:20:07.202264 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 10:20:07.702249695 +0000 UTC m=+138.520179479 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 10:20:07 crc kubenswrapper[4663]: I1211 10:20:07.304719 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc"
Dec 11 10:20:07 crc kubenswrapper[4663]: E1211 10:20:07.305462 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 10:20:07.805446351 +0000 UTC m=+138.623376136 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mxhbc" (UID: "62062293-e497-4499-b728-226260dc0ae1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 10:20:07 crc kubenswrapper[4663]: I1211 10:20:07.371719 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-n9pwg" podStartSLOduration=120.371701067 podStartE2EDuration="2m0.371701067s" podCreationTimestamp="2025-12-11 10:18:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:20:07.354321565 +0000 UTC m=+138.172251349" watchObservedRunningTime="2025-12-11 10:20:07.371701067 +0000 UTC m=+138.189630841"
Dec 11 10:20:07 crc kubenswrapper[4663]: I1211 10:20:07.408293 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-nn574" podStartSLOduration=120.40595764 podStartE2EDuration="2m0.40595764s" podCreationTimestamp="2025-12-11 10:18:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:20:07.371310105 +0000 UTC m=+138.189239889" watchObservedRunningTime="2025-12-11 10:20:07.40595764 +0000 UTC m=+138.223887425"
Dec 11 10:20:07 crc kubenswrapper[4663]: I1211 10:20:07.409488 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 11 10:20:07 crc kubenswrapper[4663]: E1211 10:20:07.409951 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 10:20:07.909935328 +0000 UTC m=+138.727865112 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 10:20:07 crc kubenswrapper[4663]: I1211 10:20:07.429064 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-g29lh" podStartSLOduration=120.429043856 podStartE2EDuration="2m0.429043856s" podCreationTimestamp="2025-12-11 10:18:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:20:07.408934245 +0000 UTC m=+138.226864029" watchObservedRunningTime="2025-12-11 10:20:07.429043856 +0000 UTC m=+138.246973640"
Dec 11 10:20:07 crc kubenswrapper[4663]: I1211 10:20:07.497227 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-nfh76" podStartSLOduration=5.497210201 podStartE2EDuration="5.497210201s" podCreationTimestamp="2025-12-11 10:20:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:20:07.495781782 +0000 UTC m=+138.313711566" watchObservedRunningTime="2025-12-11 10:20:07.497210201 +0000 UTC m=+138.315139975"
Dec 11 10:20:07 crc kubenswrapper[4663]: I1211 10:20:07.497792 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-8mrgz" event={"ID":"a35ea846-6e87-40fe-8998-037fb7bd2060","Type":"ContainerStarted","Data":"8d5536f0db0229b97d73dc497614d4fb1e59ca80daf2f1ea36e2fb5d350d4f41"}
Dec 11 10:20:07 crc kubenswrapper[4663]: I1211 10:20:07.511895 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc"
Dec 11 10:20:07 crc kubenswrapper[4663]: E1211 10:20:07.512266 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 10:20:08.012255387 +0000 UTC m=+138.830185170 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mxhbc" (UID: "62062293-e497-4499-b728-226260dc0ae1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 10:20:07 crc kubenswrapper[4663]: I1211 10:20:07.536857 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-mn2pm" podStartSLOduration=121.536841167 podStartE2EDuration="2m1.536841167s" podCreationTimestamp="2025-12-11 10:18:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:20:07.535589971 +0000 UTC m=+138.353519756" watchObservedRunningTime="2025-12-11 10:20:07.536841167 +0000 UTC m=+138.354770950"
Dec 11 10:20:07 crc kubenswrapper[4663]: I1211 10:20:07.579579 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-x8ln7" event={"ID":"2bb618f2-adb1-40ff-a336-2520519233b7","Type":"ContainerStarted","Data":"17e7469506961328d757214250289ebc8ac72774c5c8fba34c00970fa1faf006"}
Dec 11 10:20:07 crc kubenswrapper[4663]: I1211 10:20:07.582778 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-8mrgz" podStartSLOduration=120.582764296 podStartE2EDuration="2m0.582764296s" podCreationTimestamp="2025-12-11 10:18:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:20:07.582462868 +0000 UTC m=+138.400392653" watchObservedRunningTime="2025-12-11 10:20:07.582764296 +0000 UTC m=+138.400694081"
Dec 11 10:20:07 crc kubenswrapper[4663]: I1211 10:20:07.593325 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-75lmn" event={"ID":"83732716-a7c1-4be4-a73a-1d7a9633af29","Type":"ContainerStarted","Data":"f3d6a97d70c4951ded9f08013b19162ab185da0fdf0a1a4104c904e0c2bded08"}
Dec 11 10:20:07 crc kubenswrapper[4663]: I1211 10:20:07.604832 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-25wmw" event={"ID":"507a4649-4e86-435b-8ceb-9587a90bc48c","Type":"ContainerStarted","Data":"e41088f0902c4f7249c13d462387c12ddb44e50a0b6c41d6e27fdd8bc1c3a5c5"}
Dec 11 10:20:07 crc kubenswrapper[4663]: I1211 10:20:07.604864 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-25wmw" event={"ID":"507a4649-4e86-435b-8ceb-9587a90bc48c","Type":"ContainerStarted","Data":"23b214cf1cf23e9de5221a98a782895f67d928984ea9219f475a6e2102a754d3"}
Dec 11 10:20:07 crc kubenswrapper[4663]: I1211 10:20:07.606579 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-25wmw"
Dec 11 10:20:07 crc kubenswrapper[4663]: I1211 10:20:07.612376 4663 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-25wmw container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.19:5443/healthz\": dial tcp 10.217.0.19:5443: connect: connection refused" start-of-body=
Dec 11 10:20:07 crc kubenswrapper[4663]: I1211 10:20:07.612455 4663 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-25wmw" podUID="507a4649-4e86-435b-8ceb-9587a90bc48c" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.19:5443/healthz\": dial tcp 10.217.0.19:5443: connect: connection refused"
Dec 11 10:20:07 crc kubenswrapper[4663]: I1211 10:20:07.613023 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 11 10:20:07 crc kubenswrapper[4663]: E1211 10:20:07.614104 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 10:20:08.114087204 +0000 UTC m=+138.932016988 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 10:20:07 crc kubenswrapper[4663]: I1211 10:20:07.648470 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fwjxr" event={"ID":"32a7449e-b310-406a-b03a-4e2e461d11ae","Type":"ContainerStarted","Data":"5c7bda01ce0c0c1eaf5179cf86d497cb943466e56d6e32e8361e3d8601810c0c"}
Dec 11 10:20:07 crc kubenswrapper[4663]: I1211 10:20:07.662336 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-x8ln7" podStartSLOduration=121.662315671 podStartE2EDuration="2m1.662315671s" podCreationTimestamp="2025-12-11 10:18:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:20:07.625853654 +0000 UTC m=+138.443783438" watchObservedRunningTime="2025-12-11 10:20:07.662315671 +0000 UTC m=+138.480245455"
Dec 11 10:20:07 crc kubenswrapper[4663]: I1211 10:20:07.694011 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-p968m" event={"ID":"44eee7e7-6cdd-40bf-a998-ed2c4ae7884a","Type":"ContainerStarted","Data":"72f3ad4ea34de768847e4e9cfdf3d89679a09bc47cf5c934254281fc7b52dd6e"}
Dec 11 10:20:07 crc kubenswrapper[4663]: I1211 10:20:07.694078 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-p968m" event={"ID":"44eee7e7-6cdd-40bf-a998-ed2c4ae7884a","Type":"ContainerStarted","Data":"974e4ccacc804c660063bcb219e7aad8805e4206f1f6163f4584fdbab2df98bf"}
Dec 11 10:20:07 crc kubenswrapper[4663]: I1211 10:20:07.705630 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-vm8rp" event={"ID":"a533fee8-aa25-4eae-9eff-134d4b61033f","Type":"ContainerStarted","Data":"f4eab02c589d072ed1f5e27d23277085656a4c287ea3656bfce36f266f1787fd"}
Dec 11 10:20:07 crc kubenswrapper[4663]: I1211 10:20:07.711788 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-75lmn" podStartSLOduration=120.711776915 podStartE2EDuration="2m0.711776915s" podCreationTimestamp="2025-12-11 10:18:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:20:07.710904276 +0000 UTC m=+138.528834060" watchObservedRunningTime="2025-12-11 10:20:07.711776915 +0000 UTC m=+138.529706700"
Dec 11 10:20:07 crc kubenswrapper[4663]: I1211 10:20:07.712049 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-25wmw" podStartSLOduration=120.712043833 podStartE2EDuration="2m0.712043833s" podCreationTimestamp="2025-12-11 10:18:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:20:07.661386505 +0000 UTC m=+138.479316288" watchObservedRunningTime="2025-12-11 10:20:07.712043833 +0000 UTC m=+138.529973617"
Dec 11 10:20:07 crc kubenswrapper[4663]: I1211 10:20:07.714278 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc"
Dec 11 10:20:07 crc kubenswrapper[4663]: E1211 10:20:07.715812 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 10:20:08.215796379 +0000 UTC m=+139.033726163 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mxhbc" (UID: "62062293-e497-4499-b728-226260dc0ae1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 10:20:07 crc kubenswrapper[4663]: I1211 10:20:07.731655 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-cb2kn" event={"ID":"905be650-0ed4-45ef-984c-00ab31ee178b","Type":"ContainerStarted","Data":"a7646331c8252031fba8844e978d7aeb441f82c7d56f392df6c00d2c70940c10"}
Dec 11 10:20:07 crc kubenswrapper[4663]: I1211 10:20:07.731693 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-cb2kn" event={"ID":"905be650-0ed4-45ef-984c-00ab31ee178b","Type":"ContainerStarted","Data":"8b11a52115915ce6656c38b1742a7ee700c29eae7f5e67e9f78401d49386f3ab"}
Dec 11 10:20:07 crc kubenswrapper[4663]: I1211 10:20:07.748074 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fwjxr" podStartSLOduration=120.748056208 podStartE2EDuration="2m0.748056208s" podCreationTimestamp="2025-12-11 10:18:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:20:07.744593165 +0000 UTC m=+138.562522949" watchObservedRunningTime="2025-12-11 10:20:07.748056208 +0000 UTC m=+138.565985992"
Dec 11 10:20:07 crc kubenswrapper[4663]: I1211 10:20:07.760446 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-dc8cp" event={"ID":"d18be0a7-2aaa-4aaa-9d37-f44453e994d8","Type":"ContainerStarted","Data":"4a878ed1dc6b6568d1664f7125ba68ca66c30d623daf66591d8b88f7155fa2e1"}
Dec 11 10:20:07 crc kubenswrapper[4663]: I1211 10:20:07.784610 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-p968m" podStartSLOduration=120.784593117 podStartE2EDuration="2m0.784593117s" podCreationTimestamp="2025-12-11 10:18:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:20:07.781484432 +0000 UTC m=+138.599414216" watchObservedRunningTime="2025-12-11 10:20:07.784593117 +0000 UTC m=+138.602522901"
Dec 11 10:20:07 crc kubenswrapper[4663]: I1211 10:20:07.789424 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-n6dwq" event={"ID":"14f276e8-14d3-4e67-956d-ad6212f45c69","Type":"ContainerStarted","Data":"a90d628be04a58f13d73868ef108fa539edffedddcaf0dcaccd3e370533d5e97"}
Dec 11 10:20:07 crc kubenswrapper[4663]: I1211 10:20:07.809881 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-n6dwq"
Dec 11 10:20:07 crc kubenswrapper[4663]: I1211 10:20:07.814998 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xt8gz" event={"ID":"7f7716ca-ea2e-45ea-a861-d592b5c077ac","Type":"ContainerStarted","Data":"4a36ea0da9ffbb11fca2bb883e0d8f1e64701c445e7f3440dd1e2c7d242abbf1"} Dec 11
10:20:07 crc kubenswrapper[4663]: I1211 10:20:07.815562 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 10:20:07 crc kubenswrapper[4663]: E1211 10:20:07.815748 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 10:20:08.315719351 +0000 UTC m=+139.133649136 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 10:20:07 crc kubenswrapper[4663]: I1211 10:20:07.816050 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" Dec 11 10:20:07 crc kubenswrapper[4663]: E1211 10:20:07.817225 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 10:20:08.317211902 +0000 UTC m=+139.135141686 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mxhbc" (UID: "62062293-e497-4499-b728-226260dc0ae1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 10:20:07 crc kubenswrapper[4663]: I1211 10:20:07.843790 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-pnbrx" event={"ID":"b16defcf-1335-4210-a647-bcaef5e0642f","Type":"ContainerStarted","Data":"dba2f01cb5fa15d262180b03d6aba88d9c2a48eb05bb6060fb9b14b16893d162"} Dec 11 10:20:07 crc kubenswrapper[4663]: I1211 10:20:07.843848 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-pnbrx" event={"ID":"b16defcf-1335-4210-a647-bcaef5e0642f","Type":"ContainerStarted","Data":"4ca9ea1830d9ba213d42a7d6e402011294ac270051b44983756389becd089a45"} Dec 11 10:20:07 crc kubenswrapper[4663]: I1211 10:20:07.858671 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-dc8cp" podStartSLOduration=121.858655845 podStartE2EDuration="2m1.858655845s" podCreationTimestamp="2025-12-11 10:18:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:20:07.85383523 +0000 UTC m=+138.671765014" watchObservedRunningTime="2025-12-11 10:20:07.858655845 +0000 UTC m=+138.676585629" Dec 11 10:20:07 crc kubenswrapper[4663]: I1211 10:20:07.867445 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9kh7f" event={"ID":"2a76c011-1596-4893-b01f-abf8e6e5295c","Type":"ContainerStarted","Data":"459152adecd995d78ba27309f1048e77df3c5f5d29a5bd71125e87fe062ca9d9"} Dec 11 10:20:07 crc kubenswrapper[4663]: I1211 10:20:07.881023 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xt8gz" podStartSLOduration=120.881009779 podStartE2EDuration="2m0.881009779s" podCreationTimestamp="2025-12-11 10:18:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:20:07.880426503 +0000 UTC m=+138.698356287" watchObservedRunningTime="2025-12-11 10:20:07.881009779 +0000 UTC m=+138.698939563" Dec 11 10:20:07 crc kubenswrapper[4663]: I1211 10:20:07.916717 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 10:20:07 crc kubenswrapper[4663]: E1211 10:20:07.917884 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 10:20:08.417866805 +0000 UTC m=+139.235796589 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 10:20:07 crc kubenswrapper[4663]: I1211 10:20:07.938723 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-9cvs5" event={"ID":"26fa94f3-6f45-4641-a0b5-b027b7db54c3","Type":"ContainerStarted","Data":"55225c40eb9fe57fb67f7d543cfe3c655f43883ec2644e767139ccf69ef4be60"} Dec 11 10:20:07 crc kubenswrapper[4663]: I1211 10:20:07.950574 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-rw7qg" event={"ID":"44909f31-73c1-4282-a2f6-e93bc1bf6d32","Type":"ContainerStarted","Data":"75d8d0725a2fa5025e1a56324eba2bdb2de4d28c12ab77b3a7f31be901aa65b7"} Dec 11 10:20:07 crc kubenswrapper[4663]: I1211 10:20:07.977460 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-kw96j" event={"ID":"58945b3f-8314-4251-a1e1-20dff39bf316","Type":"ContainerStarted","Data":"4641eee4c29ca9a539ccc6e96b626289bb689855e4cfa6f3ea8c146252805f64"} Dec 11 10:20:07 crc kubenswrapper[4663]: I1211 10:20:07.987531 4663 patch_prober.go:28] interesting pod/router-default-5444994796-n9pwg container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 10:20:07 crc kubenswrapper[4663]: [-]has-synced failed: reason withheld Dec 11 10:20:07 crc kubenswrapper[4663]: [+]process-running ok Dec 11 10:20:07 crc kubenswrapper[4663]: healthz check failed Dec 11 10:20:07 crc kubenswrapper[4663]: I1211 10:20:07.987583 4663 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-n9pwg" podUID="3a676c5e-7428-4f37-9aa2-0755611c6ff3" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.003388 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fdg9t" event={"ID":"1fc12aa7-09ac-4720-b32e-920301ca04ee","Type":"ContainerStarted","Data":"eab36667b4de236d2850f9d20732382dabfc7110289d9e6f1fece401ad2ee2d3"} Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.018257 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.019208 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-9nsfq" event={"ID":"47f9ce8e-00cf-4b1f-9a90-816f9eb7338f","Type":"ContainerStarted","Data":"9a7765f0f3c6f494e2f5dd409d3d164d402d8730fc8446ed45623684353fe258"} Dec 11 10:20:08 crc kubenswrapper[4663]: E1211 10:20:08.020694 4663 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 10:20:08.520681498 +0000 UTC m=+139.338611273 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mxhbc" (UID: "62062293-e497-4499-b728-226260dc0ae1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.022327 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-pnbrx" podStartSLOduration=121.022307272 podStartE2EDuration="2m1.022307272s" podCreationTimestamp="2025-12-11 10:18:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:20:07.96561101 +0000 UTC m=+138.783540793" watchObservedRunningTime="2025-12-11 10:20:08.022307272 +0000 UTC m=+138.840237056" Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.023225 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-9cvs5" podStartSLOduration=122.023220436 podStartE2EDuration="2m2.023220436s" podCreationTimestamp="2025-12-11 10:18:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:20:08.014918279 +0000 UTC m=+138.832848063" watchObservedRunningTime="2025-12-11 10:20:08.023220436 +0000 UTC m=+138.841150219" Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.049582 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-jnb74" event={"ID":"10c2efee-1b70-4258-92f0-caba3a5cfd50","Type":"ContainerStarted","Data":"86f71df5dfa044fab52ce2c7658977b5b9d8d2d69cf87407877a4b43c98f6b4b"} Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.050395 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-jnb74" Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.068860 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9kh7f" podStartSLOduration=121.068850293 podStartE2EDuration="2m1.068850293s" podCreationTimestamp="2025-12-11 10:18:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:20:08.068269451 +0000 UTC m=+138.886199226" watchObservedRunningTime="2025-12-11 10:20:08.068850293 +0000 UTC m=+138.886780077" Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.069705 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hfmnw" podStartSLOduration=121.069699785 podStartE2EDuration="2m1.069699785s" podCreationTimestamp="2025-12-11 10:18:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:20:08.047250907 +0000 UTC 
m=+138.865180691" watchObservedRunningTime="2025-12-11 10:20:08.069699785 +0000 UTC m=+138.887629570" Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.079228 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-vsdq4" event={"ID":"172795ed-fdc1-4567-ae54-f6980f7d2038","Type":"ContainerStarted","Data":"36e20462b2984f13178624b43a4e7ee0bec5f4855885f63600a7734c962bbcfd"} Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.079272 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-vsdq4" event={"ID":"172795ed-fdc1-4567-ae54-f6980f7d2038","Type":"ContainerStarted","Data":"c3bf281d8c6eafeec58700b6584c97aade6b90394b64ffb1fbe988e9ae1307ba"} Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.108421 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-rw7qg" podStartSLOduration=121.108404381 podStartE2EDuration="2m1.108404381s" podCreationTimestamp="2025-12-11 10:18:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:20:08.106812958 +0000 UTC m=+138.924742731" watchObservedRunningTime="2025-12-11 10:20:08.108404381 +0000 UTC m=+138.926334165" Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.115685 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvj6k" event={"ID":"c7f23b6b-b7a1-46f4-b553-37be88f8fae0","Type":"ContainerStarted","Data":"1eca88383e729bb647bdefa3cc8b092b597bcefabd92106fda0d7dd4accbf3ac"} Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.116630 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvj6k" Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.119292 4663 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-zvj6k container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.8:8443/healthz\": dial tcp 10.217.0.8:8443: connect: connection refused" start-of-body= Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.119401 4663 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvj6k" podUID="c7f23b6b-b7a1-46f4-b553-37be88f8fae0" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.8:8443/healthz\": dial tcp 10.217.0.8:8443: connect: connection refused" Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.119998 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 10:20:08 crc kubenswrapper[4663]: E1211 10:20:08.121842 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 10:20:08.621830235 +0000 UTC m=+139.439760019 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.143650 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-bnmt9" event={"ID":"99f0a410-2f12-45d0-8b95-23552288ee49","Type":"ContainerStarted","Data":"92db6851cef5c66cfb95fb084f77413f936f5bc94dd933ec50531060b3a7ed97"} Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.166147 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424135-v2t6r" event={"ID":"b15591c3-541e-4daf-8e21-9b69db5cf260","Type":"ContainerStarted","Data":"28a4a8a3f9c056c7f5d084e5abddd28f327fad286bc30422a8154a3036396db5"} Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.173317 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-jnb74" podStartSLOduration=121.173303366 podStartE2EDuration="2m1.173303366s" podCreationTimestamp="2025-12-11 10:18:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:20:08.160481391 +0000 UTC m=+138.978411175" watchObservedRunningTime="2025-12-11 10:20:08.173303366 +0000 UTC m=+138.991233150" Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.176524 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zzlmb" event={"ID":"f64a0f1e-3264-4862-ad3b-097bf139855c","Type":"ContainerStarted","Data":"cc54a1746422cc5797e3f81ac4f84d4bbd9419aea6a955f1be4307da6ffb72ef"} Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.176585 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zzlmb" event={"ID":"f64a0f1e-3264-4862-ad3b-097bf139855c","Type":"ContainerStarted","Data":"1a87bc5f6658e62a5c4809d1a9a7d2fc5a1e068d35beee9bbe11f99582500d8d"} Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.177609 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zzlmb" Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.185915 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-h4kjb" event={"ID":"4e035b58-1374-487b-9d9f-e9317793cdc7","Type":"ContainerStarted","Data":"f000547963f36c49acf0b7f9c1092f46877bc0046a5a4fa050b16c766a425784"} Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.200689 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zxbz5" event={"ID":"9813fa82-894c-4380-8975-24771adf79e7","Type":"ContainerStarted","Data":"1dfd50b8fae7b7e1a9655ea9e8afbca69aebe066db902e3d6d2b548d798604f3"} Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.201619 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zxbz5" Dec 11 
10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.210049 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zzlmb" Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.210273 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-cgckf" event={"ID":"a4cc9117-6286-44ba-a590-893cff694a7c","Type":"ContainerStarted","Data":"9e2a2e59c6a1836cca4f934d9e66d29c61df22777db4150014f7e6c9d168aa0d"} Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.211295 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-cgckf" Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.225789 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.226294 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-vsdq4" podStartSLOduration=121.22627935 podStartE2EDuration="2m1.22627935s" podCreationTimestamp="2025-12-11 10:18:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:20:08.219534357 +0000 UTC m=+139.037464140" watchObservedRunningTime="2025-12-11 10:20:08.22627935 +0000 UTC m=+139.044209133" Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.227224 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zxbz5" Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.227483 4663 generic.go:334] "Generic (PLEG): container finished" podID="e3cf153e-0d08-489d-8d35-1170356831ee" containerID="88d121c3b9b5f741d1578d4ddedba3252e7e71039cb119b8a0199373adebdce9" exitCode=0 Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.227661 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-hvrvb" event={"ID":"e3cf153e-0d08-489d-8d35-1170356831ee","Type":"ContainerDied","Data":"88d121c3b9b5f741d1578d4ddedba3252e7e71039cb119b8a0199373adebdce9"} Dec 11 10:20:08 crc kubenswrapper[4663]: E1211 10:20:08.228479 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 10:20:08.728462817 +0000 UTC m=+139.546392601 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mxhbc" (UID: "62062293-e497-4499-b728-226260dc0ae1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.253698 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-26pzr" event={"ID":"b278aa37-9fc1-430d-9182-2dbf0f56b930","Type":"ContainerStarted","Data":"4888feced659f67bc11671b3a7463fdf19518d7425027444ca6122d530055c88"} Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.272828 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fdg9t" podStartSLOduration=121.272814674 podStartE2EDuration="2m1.272814674s" podCreationTimestamp="2025-12-11 10:18:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:20:08.271093594 +0000 UTC m=+139.089023379" watchObservedRunningTime="2025-12-11 10:20:08.272814674 +0000 UTC m=+139.090744458" Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.274577 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-24p95" event={"ID":"05009b2f-f136-4f9d-8d58-169816bb1ea6","Type":"ContainerStarted","Data":"e4fff5a3f5be54137cdcf6a3002394c1c909cd92de49770bfc4f3ba925121246"} Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.274618 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-24p95" event={"ID":"05009b2f-f136-4f9d-8d58-169816bb1ea6","Type":"ContainerStarted","Data":"52015d20c4bd3ea97be9512c68b935b2ae9290ae3845b72f175f5bd7a4638424"} Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.277552 4663 patch_prober.go:28] interesting pod/downloads-7954f5f757-mn2pm container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.21:8080/\": dial tcp 10.217.0.21:8080: connect: connection refused" start-of-body= Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.277610 4663 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-mn2pm" podUID="46f7ebf5-734d-41f6-a8eb-1daf1dc0e014" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.21:8080/\": dial tcp 10.217.0.21:8080: connect: connection refused" Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.327773 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 10:20:08 crc kubenswrapper[4663]: E1211 10:20:08.328279 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-11 10:20:08.828263499 +0000 UTC m=+139.646193282 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.328696 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" Dec 11 10:20:08 crc kubenswrapper[4663]: E1211 10:20:08.334233 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 10:20:08.834207638 +0000 UTC m=+139.652137423 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mxhbc" (UID: "62062293-e497-4499-b728-226260dc0ae1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.429599 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 10:20:08 crc kubenswrapper[4663]: E1211 10:20:08.430000 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 10:20:08.929981852 +0000 UTC m=+139.747911637 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.492830 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-kw96j" podStartSLOduration=121.492812136 podStartE2EDuration="2m1.492812136s" podCreationTimestamp="2025-12-11 10:18:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:20:08.420681831 +0000 UTC m=+139.238611614" watchObservedRunningTime="2025-12-11 10:20:08.492812136 +0000 UTC m=+139.310741920" Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.502419 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-cgckf" Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.531838 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" Dec 11 10:20:08 crc kubenswrapper[4663]: E1211 10:20:08.532212 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 10:20:09.032199561 +0000 UTC m=+139.850129345 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mxhbc" (UID: "62062293-e497-4499-b728-226260dc0ae1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.602116 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvj6k" podStartSLOduration=121.602099212 podStartE2EDuration="2m1.602099212s" podCreationTimestamp="2025-12-11 10:18:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:20:08.494778398 +0000 UTC m=+139.312708172" watchObservedRunningTime="2025-12-11 10:20:08.602099212 +0000 UTC m=+139.420028996" Dec 11 10:20:08 crc kubenswrapper[4663]: E1211 10:20:08.633093 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 10:20:09.133079417 +0000 UTC m=+139.951009190 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.633013 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.633436 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" Dec 11 10:20:08 crc kubenswrapper[4663]: E1211 10:20:08.633753 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 10:20:09.133745602 +0000 UTC m=+139.951675387 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mxhbc" (UID: "62062293-e497-4499-b728-226260dc0ae1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.636700 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-24p95" podStartSLOduration=121.636688167 podStartE2EDuration="2m1.636688167s" podCreationTimestamp="2025-12-11 10:18:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:20:08.601491466 +0000 UTC m=+139.419421250" watchObservedRunningTime="2025-12-11 10:20:08.636688167 +0000 UTC m=+139.454617951" Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.686104 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zxbz5" podStartSLOduration=121.686082465 podStartE2EDuration="2m1.686082465s" podCreationTimestamp="2025-12-11 10:18:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:20:08.640412966 +0000 UTC m=+139.458342750" watchObservedRunningTime="2025-12-11 10:20:08.686082465 +0000 UTC m=+139.504012240" Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.686846 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zzlmb" podStartSLOduration=121.686838044 
podStartE2EDuration="2m1.686838044s" podCreationTimestamp="2025-12-11 10:18:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:20:08.686686414 +0000 UTC m=+139.504616198" watchObservedRunningTime="2025-12-11 10:20:08.686838044 +0000 UTC m=+139.504767829" Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.734563 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 10:20:08 crc kubenswrapper[4663]: E1211 10:20:08.734995 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 10:20:09.234949613 +0000 UTC m=+140.052879396 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.796468 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-cgckf" podStartSLOduration=122.796452091 podStartE2EDuration="2m2.796452091s" podCreationTimestamp="2025-12-11 10:18:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:20:08.789540667 +0000 UTC m=+139.607470452" watchObservedRunningTime="2025-12-11 10:20:08.796452091 +0000 UTC m=+139.614381876" Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.836896 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" Dec 11 10:20:08 crc kubenswrapper[4663]: E1211 10:20:08.837261 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 10:20:09.33724982 +0000 UTC m=+140.155179604 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mxhbc" (UID: "62062293-e497-4499-b728-226260dc0ae1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.856104 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-26pzr" podStartSLOduration=122.856083825 podStartE2EDuration="2m2.856083825s" podCreationTimestamp="2025-12-11 10:18:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:20:08.821483175 +0000 UTC m=+139.639412960" watchObservedRunningTime="2025-12-11 10:20:08.856083825 +0000 UTC m=+139.674013609" Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.856422 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-h4kjb" podStartSLOduration=121.856416277 podStartE2EDuration="2m1.856416277s" podCreationTimestamp="2025-12-11 10:18:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:20:08.854892682 +0000 UTC m=+139.672822467" watchObservedRunningTime="2025-12-11 10:20:08.856416277 +0000 UTC m=+139.674346060" Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.937618 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 10:20:08 crc kubenswrapper[4663]: E1211 10:20:08.937968 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 10:20:09.437954876 +0000 UTC m=+140.255884660 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.955952 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-bnmt9" podStartSLOduration=6.9559292280000005 podStartE2EDuration="6.955929228s" podCreationTimestamp="2025-12-11 10:20:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:20:08.897255761 +0000 UTC m=+139.715185545" watchObservedRunningTime="2025-12-11 10:20:08.955929228 +0000 UTC m=+139.773859012" Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.981108 4663 patch_prober.go:28] interesting pod/router-default-5444994796-n9pwg container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 10:20:08 crc kubenswrapper[4663]: [-]has-synced failed: reason withheld Dec 11 10:20:08 crc kubenswrapper[4663]: [+]process-running ok Dec 11 10:20:08 crc kubenswrapper[4663]: healthz check failed Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.981156 4663 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-n9pwg" podUID="3a676c5e-7428-4f37-9aa2-0755611c6ff3" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.997726 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-mlbtj"] Dec 11 10:20:08 crc kubenswrapper[4663]: I1211 10:20:08.998581 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-mlbtj" Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.000722 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.025988 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mlbtj"] Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.039356 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-974kp\" (UniqueName: \"kubernetes.io/projected/3b63171f-dadd-465c-98c6-0197a59336a9-kube-api-access-974kp\") pod \"community-operators-mlbtj\" (UID: \"3b63171f-dadd-465c-98c6-0197a59336a9\") " pod="openshift-marketplace/community-operators-mlbtj" Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.039571 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b63171f-dadd-465c-98c6-0197a59336a9-catalog-content\") pod \"community-operators-mlbtj\" (UID: \"3b63171f-dadd-465c-98c6-0197a59336a9\") " pod="openshift-marketplace/community-operators-mlbtj" Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.039703 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.039857 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b63171f-dadd-465c-98c6-0197a59336a9-utilities\") pod \"community-operators-mlbtj\" (UID: \"3b63171f-dadd-465c-98c6-0197a59336a9\") " pod="openshift-marketplace/community-operators-mlbtj" Dec 11 10:20:09 crc kubenswrapper[4663]: E1211 10:20:09.039954 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 10:20:09.539941271 +0000 UTC m=+140.357871055 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mxhbc" (UID: "62062293-e497-4499-b728-226260dc0ae1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.140817 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 10:20:09 crc kubenswrapper[4663]: E1211 10:20:09.141086 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 10:20:09.641059213 +0000 UTC m=+140.458988997 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.141243 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b63171f-dadd-465c-98c6-0197a59336a9-utilities\") pod \"community-operators-mlbtj\" (UID: \"3b63171f-dadd-465c-98c6-0197a59336a9\") " pod="openshift-marketplace/community-operators-mlbtj" Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.141326 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-974kp\" (UniqueName: \"kubernetes.io/projected/3b63171f-dadd-465c-98c6-0197a59336a9-kube-api-access-974kp\") pod \"community-operators-mlbtj\" (UID: \"3b63171f-dadd-465c-98c6-0197a59336a9\") " pod="openshift-marketplace/community-operators-mlbtj" Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.141370 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b63171f-dadd-465c-98c6-0197a59336a9-catalog-content\") pod \"community-operators-mlbtj\" (UID: \"3b63171f-dadd-465c-98c6-0197a59336a9\") " pod="openshift-marketplace/community-operators-mlbtj" Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.141400 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" Dec 11 10:20:09 crc kubenswrapper[4663]: E1211 10:20:09.141675 4663 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 10:20:09.641664515 +0000 UTC m=+140.459594300 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mxhbc" (UID: "62062293-e497-4499-b728-226260dc0ae1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.142006 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b63171f-dadd-465c-98c6-0197a59336a9-utilities\") pod \"community-operators-mlbtj\" (UID: \"3b63171f-dadd-465c-98c6-0197a59336a9\") " pod="openshift-marketplace/community-operators-mlbtj" Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.142487 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b63171f-dadd-465c-98c6-0197a59336a9-catalog-content\") pod \"community-operators-mlbtj\" (UID: \"3b63171f-dadd-465c-98c6-0197a59336a9\") " pod="openshift-marketplace/community-operators-mlbtj" Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.174894 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-974kp\" (UniqueName: \"kubernetes.io/projected/3b63171f-dadd-465c-98c6-0197a59336a9-kube-api-access-974kp\") pod \"community-operators-mlbtj\" (UID: \"3b63171f-dadd-465c-98c6-0197a59336a9\") " pod="openshift-marketplace/community-operators-mlbtj" Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.186750 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-p4fsq"] Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.187572 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-p4fsq" Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.199162 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.213046 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-p4fsq"] Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.242473 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.242671 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8f822dbf-f355-45f9-b293-f1ab29619ed7-catalog-content\") pod \"certified-operators-p4fsq\" (UID: \"8f822dbf-f355-45f9-b293-f1ab29619ed7\") " pod="openshift-marketplace/certified-operators-p4fsq" Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.242705 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w4rjd\" (UniqueName: \"kubernetes.io/projected/8f822dbf-f355-45f9-b293-f1ab29619ed7-kube-api-access-w4rjd\") pod \"certified-operators-p4fsq\" (UID: \"8f822dbf-f355-45f9-b293-f1ab29619ed7\") " pod="openshift-marketplace/certified-operators-p4fsq" Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.242729 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8f822dbf-f355-45f9-b293-f1ab29619ed7-utilities\") pod \"certified-operators-p4fsq\" (UID: \"8f822dbf-f355-45f9-b293-f1ab29619ed7\") " pod="openshift-marketplace/certified-operators-p4fsq" Dec 11 10:20:09 crc kubenswrapper[4663]: E1211 10:20:09.242848 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 10:20:09.742834294 +0000 UTC m=+140.560764078 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.287310 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hfmnw" event={"ID":"2c2ceeb7-b3cc-4085-8591-0e0c42a01c34","Type":"ContainerStarted","Data":"2a9d1cfef1a127bbe141b38dae0cf4699d4e67f25a9cf88740fa53536c2ded8d"} Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.289869 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-rw7qg" event={"ID":"44909f31-73c1-4282-a2f6-e93bc1bf6d32","Type":"ContainerStarted","Data":"d873df2114f4a048bed05c6e01e5cfddc7df5c67b37e547239c3c9689918329b"} Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.292327 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-9cvs5" event={"ID":"26fa94f3-6f45-4641-a0b5-b027b7db54c3","Type":"ContainerStarted","Data":"179df3eeb4cbbe0a696112537c3ac4bc7202156540d3bdf6168d53ec2e0111c8"} Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.294137 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-9nsfq" event={"ID":"47f9ce8e-00cf-4b1f-9a90-816f9eb7338f","Type":"ContainerStarted","Data":"7ca18cd47a8a893dcec9eeb75718af231d71cc09511cc03e7da3f9a6b59ac51b"} Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.295336 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-h4kjb" event={"ID":"4e035b58-1374-487b-9d9f-e9317793cdc7","Type":"ContainerStarted","Data":"194e0be3f49199a28210d3c35f074a5af3ebfe35914b5841d7618dcc49d6f988"} Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.296664 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-p968m" event={"ID":"44eee7e7-6cdd-40bf-a998-ed2c4ae7884a","Type":"ContainerStarted","Data":"80736b7e0571c9696246521d1f1b38d5d8890a02ab72168b8d09f71400364798"} Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.298573 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-hvrvb" event={"ID":"e3cf153e-0d08-489d-8d35-1170356831ee","Type":"ContainerStarted","Data":"0ec3e0013597a63d09db5cf882ce04c1ba67ad521cc7a6b903d1f10addd1cd07"} Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.298842 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-hvrvb" Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.299833 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-cb2kn" event={"ID":"905be650-0ed4-45ef-984c-00ab31ee178b","Type":"ContainerStarted","Data":"c8fc6fc2e8aebfa3ad7ad05d75564e90f474070a6a6ed5ff2be4d5f3a5a7c187"} Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.300809 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xt8gz" event={"ID":"7f7716ca-ea2e-45ea-a861-d592b5c077ac","Type":"ContainerStarted","Data":"cbc609a9d147293320633b9b06c3b24ae05c390cd349be2ec7867eb9670c8573"} Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.302615 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-vm8rp" event={"ID":"a533fee8-aa25-4eae-9eff-134d4b61033f","Type":"ContainerStarted","Data":"ecd4779fa2c4680f93762ef1109929ae848653c19afd35dc681f2518a5646c12"} Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.302659 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-vm8rp" event={"ID":"a533fee8-aa25-4eae-9eff-134d4b61033f","Type":"ContainerStarted","Data":"ee53cb80918b15c15a39597e0cdf10e2a4646d9c19d0a17cd8987bb0cf35ac5c"} Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.302774 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-vm8rp" Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.303624 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvj6k" event={"ID":"c7f23b6b-b7a1-46f4-b553-37be88f8fae0","Type":"ContainerStarted","Data":"9193f5babeee3d67a69eea3a5073c716b8f3e77a5b6658c11649300055956d39"} Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.305335 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-bnmt9" event={"ID":"99f0a410-2f12-45d0-8b95-23552288ee49","Type":"ContainerStarted","Data":"ad4a6370a9fcae6de28722aa6b278881489d99d7101e86824ea3e96bfd3acd53"} Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.306793 4663 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-24p95 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.18:8080/healthz\": dial tcp 10.217.0.18:8080: connect: connection refused" start-of-body= Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.306825 4663 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-24p95" podUID="05009b2f-f136-4f9d-8d58-169816bb1ea6" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.18:8080/healthz\": dial tcp 10.217.0.18:8080: connect: connection refused" Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.309537 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-24p95" Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.311993 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-mlbtj" Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.313038 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvj6k" Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.344357 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8f822dbf-f355-45f9-b293-f1ab29619ed7-catalog-content\") pod \"certified-operators-p4fsq\" (UID: \"8f822dbf-f355-45f9-b293-f1ab29619ed7\") " pod="openshift-marketplace/certified-operators-p4fsq" Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.344452 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w4rjd\" (UniqueName: \"kubernetes.io/projected/8f822dbf-f355-45f9-b293-f1ab29619ed7-kube-api-access-w4rjd\") pod \"certified-operators-p4fsq\" (UID: \"8f822dbf-f355-45f9-b293-f1ab29619ed7\") " pod="openshift-marketplace/certified-operators-p4fsq" Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.344688 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8f822dbf-f355-45f9-b293-f1ab29619ed7-utilities\") pod \"certified-operators-p4fsq\" (UID: \"8f822dbf-f355-45f9-b293-f1ab29619ed7\") " pod="openshift-marketplace/certified-operators-p4fsq" Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.345214 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.351498 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8f822dbf-f355-45f9-b293-f1ab29619ed7-catalog-content\") pod \"certified-operators-p4fsq\" (UID: \"8f822dbf-f355-45f9-b293-f1ab29619ed7\") " pod="openshift-marketplace/certified-operators-p4fsq" Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.355403 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-25wmw" Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.355412 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8f822dbf-f355-45f9-b293-f1ab29619ed7-utilities\") pod \"certified-operators-p4fsq\" (UID: \"8f822dbf-f355-45f9-b293-f1ab29619ed7\") " pod="openshift-marketplace/certified-operators-p4fsq" Dec 11 10:20:09 crc kubenswrapper[4663]: E1211 10:20:09.369173 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 10:20:09.869063806 +0000 UTC m=+140.686993590 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mxhbc" (UID: "62062293-e497-4499-b728-226260dc0ae1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.419478 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-hvrvb" podStartSLOduration=123.419449447 podStartE2EDuration="2m3.419449447s" podCreationTimestamp="2025-12-11 10:18:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:20:09.41147352 +0000 UTC m=+140.229403304" watchObservedRunningTime="2025-12-11 10:20:09.419449447 +0000 UTC m=+140.237379231" Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.420412 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-ql2bc"] Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.421489 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ql2bc" Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.421932 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w4rjd\" (UniqueName: \"kubernetes.io/projected/8f822dbf-f355-45f9-b293-f1ab29619ed7-kube-api-access-w4rjd\") pod \"certified-operators-p4fsq\" (UID: \"8f822dbf-f355-45f9-b293-f1ab29619ed7\") " pod="openshift-marketplace/certified-operators-p4fsq" Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.440970 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ql2bc"] Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.458008 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 10:20:09 crc kubenswrapper[4663]: E1211 10:20:09.458225 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 10:20:09.958208504 +0000 UTC m=+140.776138289 (durationBeforeRetry 500ms). 
Dec 11 10:20:09 crc kubenswrapper[4663]: E1211 10:20:09.458225 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 10:20:09.958208504 +0000 UTC m=+140.776138289 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.458445 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c6c8ac96-598d-4a04-845d-84ed9cf435cf-catalog-content\") pod \"community-operators-ql2bc\" (UID: \"c6c8ac96-598d-4a04-845d-84ed9cf435cf\") " pod="openshift-marketplace/community-operators-ql2bc"
Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.458576 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h2bzg\" (UniqueName: \"kubernetes.io/projected/c6c8ac96-598d-4a04-845d-84ed9cf435cf-kube-api-access-h2bzg\") pod \"community-operators-ql2bc\" (UID: \"c6c8ac96-598d-4a04-845d-84ed9cf435cf\") " pod="openshift-marketplace/community-operators-ql2bc"
Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.458662 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c6c8ac96-598d-4a04-845d-84ed9cf435cf-utilities\") pod \"community-operators-ql2bc\" (UID: \"c6c8ac96-598d-4a04-845d-84ed9cf435cf\") " pod="openshift-marketplace/community-operators-ql2bc"
Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.458699 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc"
Dec 11 10:20:09 crc kubenswrapper[4663]: E1211 10:20:09.459009 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 10:20:09.958997422 +0000 UTC m=+140.776927206 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mxhbc" (UID: "62062293-e497-4499-b728-226260dc0ae1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.508514 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-p4fsq"
Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.526469 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-9cvs5"
Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.526492 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-9cvs5"
Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.557659 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-vm8rp" podStartSLOduration=7.557642314 podStartE2EDuration="7.557642314s" podCreationTimestamp="2025-12-11 10:20:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:20:09.557583422 +0000 UTC m=+140.375513206" watchObservedRunningTime="2025-12-11 10:20:09.557642314 +0000 UTC m=+140.375572098"
Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.566999 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.567290 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c6c8ac96-598d-4a04-845d-84ed9cf435cf-catalog-content\") pod \"community-operators-ql2bc\" (UID: \"c6c8ac96-598d-4a04-845d-84ed9cf435cf\") " pod="openshift-marketplace/community-operators-ql2bc"
Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.567383 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h2bzg\" (UniqueName: \"kubernetes.io/projected/c6c8ac96-598d-4a04-845d-84ed9cf435cf-kube-api-access-h2bzg\") pod \"community-operators-ql2bc\" (UID: \"c6c8ac96-598d-4a04-845d-84ed9cf435cf\") " pod="openshift-marketplace/community-operators-ql2bc"
Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.567448 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c6c8ac96-598d-4a04-845d-84ed9cf435cf-utilities\") pod \"community-operators-ql2bc\" (UID: \"c6c8ac96-598d-4a04-845d-84ed9cf435cf\") " pod="openshift-marketplace/community-operators-ql2bc"
Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.568121 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c6c8ac96-598d-4a04-845d-84ed9cf435cf-utilities\") pod \"community-operators-ql2bc\" (UID: \"c6c8ac96-598d-4a04-845d-84ed9cf435cf\") " pod="openshift-marketplace/community-operators-ql2bc"
Dec 11 10:20:09 crc kubenswrapper[4663]: E1211 10:20:09.568210 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 10:20:10.068176127 +0000 UTC m=+140.886105911 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.568420 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c6c8ac96-598d-4a04-845d-84ed9cf435cf-catalog-content\") pod \"community-operators-ql2bc\" (UID: \"c6c8ac96-598d-4a04-845d-84ed9cf435cf\") " pod="openshift-marketplace/community-operators-ql2bc"
Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.615066 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hfmnw"
Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.615129 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hfmnw"
Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.621832 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-fdgt7"]
Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.622944 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fdgt7"
Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.635632 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hfmnw"
Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.668056 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h2bzg\" (UniqueName: \"kubernetes.io/projected/c6c8ac96-598d-4a04-845d-84ed9cf435cf-kube-api-access-h2bzg\") pod \"community-operators-ql2bc\" (UID: \"c6c8ac96-598d-4a04-845d-84ed9cf435cf\") " pod="openshift-marketplace/community-operators-ql2bc"
Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.669951 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc"
Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.670005 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tw92w\" (UniqueName: \"kubernetes.io/projected/a0d64b13-fea1-428a-a013-5f48ee4b93b7-kube-api-access-tw92w\") pod \"certified-operators-fdgt7\" (UID: \"a0d64b13-fea1-428a-a013-5f48ee4b93b7\") " pod="openshift-marketplace/certified-operators-fdgt7"
Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.670036 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0d64b13-fea1-428a-a013-5f48ee4b93b7-catalog-content\") pod \"certified-operators-fdgt7\" (UID: \"a0d64b13-fea1-428a-a013-5f48ee4b93b7\") " pod="openshift-marketplace/certified-operators-fdgt7"
Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.670105 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0d64b13-fea1-428a-a013-5f48ee4b93b7-utilities\") pod \"certified-operators-fdgt7\" (UID: \"a0d64b13-fea1-428a-a013-5f48ee4b93b7\") " pod="openshift-marketplace/certified-operators-fdgt7"
Dec 11 10:20:09 crc kubenswrapper[4663]: E1211 10:20:09.670494 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 10:20:10.170478278 +0000 UTC m=+140.988408062 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mxhbc" (UID: "62062293-e497-4499-b728-226260dc0ae1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.681579 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fdgt7"]
Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.693492 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-cb2kn" podStartSLOduration=122.693477545 podStartE2EDuration="2m2.693477545s" podCreationTimestamp="2025-12-11 10:18:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:20:09.690522325 +0000 UTC m=+140.508452109" watchObservedRunningTime="2025-12-11 10:20:09.693477545 +0000 UTC m=+140.511407330"
Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.771552 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.771919 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tw92w\" (UniqueName: \"kubernetes.io/projected/a0d64b13-fea1-428a-a013-5f48ee4b93b7-kube-api-access-tw92w\") pod \"certified-operators-fdgt7\" (UID: \"a0d64b13-fea1-428a-a013-5f48ee4b93b7\") " pod="openshift-marketplace/certified-operators-fdgt7"
Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.772028 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0d64b13-fea1-428a-a013-5f48ee4b93b7-catalog-content\") pod \"certified-operators-fdgt7\" (UID: \"a0d64b13-fea1-428a-a013-5f48ee4b93b7\") " pod="openshift-marketplace/certified-operators-fdgt7"
Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.772136 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0d64b13-fea1-428a-a013-5f48ee4b93b7-utilities\") pod \"certified-operators-fdgt7\" (UID: \"a0d64b13-fea1-428a-a013-5f48ee4b93b7\") " pod="openshift-marketplace/certified-operators-fdgt7"
Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.772669 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0d64b13-fea1-428a-a013-5f48ee4b93b7-utilities\") pod \"certified-operators-fdgt7\" (UID: \"a0d64b13-fea1-428a-a013-5f48ee4b93b7\") " pod="openshift-marketplace/certified-operators-fdgt7"
Dec 11 10:20:09 crc kubenswrapper[4663]: E1211 10:20:09.773011 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 10:20:10.272998557 +0000 UTC m=+141.090928341 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.773242 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0d64b13-fea1-428a-a013-5f48ee4b93b7-catalog-content\") pod \"certified-operators-fdgt7\" (UID: \"a0d64b13-fea1-428a-a013-5f48ee4b93b7\") " pod="openshift-marketplace/certified-operators-fdgt7"
Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.793536 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ql2bc"
Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.820652 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tw92w\" (UniqueName: \"kubernetes.io/projected/a0d64b13-fea1-428a-a013-5f48ee4b93b7-kube-api-access-tw92w\") pod \"certified-operators-fdgt7\" (UID: \"a0d64b13-fea1-428a-a013-5f48ee4b93b7\") " pod="openshift-marketplace/certified-operators-fdgt7"
Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.873910 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc"
Dec 11 10:20:09 crc kubenswrapper[4663]: E1211 10:20:09.874286 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 10:20:10.374276258 +0000 UTC m=+141.192206042 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mxhbc" (UID: "62062293-e497-4499-b728-226260dc0ae1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.930678 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mlbtj"]
Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.976721 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 11 10:20:09 crc kubenswrapper[4663]: E1211 10:20:09.977130 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 10:20:10.477116162 +0000 UTC m=+141.295045946 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.977300 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc"
Dec 11 10:20:09 crc kubenswrapper[4663]: E1211 10:20:09.977575 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 10:20:10.477567848 +0000 UTC m=+141.295497632 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mxhbc" (UID: "62062293-e497-4499-b728-226260dc0ae1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.990470 4663 patch_prober.go:28] interesting pod/router-default-5444994796-n9pwg container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 11 10:20:09 crc kubenswrapper[4663]: [-]has-synced failed: reason withheld
Dec 11 10:20:09 crc kubenswrapper[4663]: [+]process-running ok
Dec 11 10:20:09 crc kubenswrapper[4663]: healthz check failed
Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.990521 4663 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-n9pwg" podUID="3a676c5e-7428-4f37-9aa2-0755611c6ff3" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 11 10:20:09 crc kubenswrapper[4663]: I1211 10:20:09.998017 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fdgt7"
Dec 11 10:20:10 crc kubenswrapper[4663]: I1211 10:20:10.009116 4663 patch_prober.go:28] interesting pod/apiserver-76f77b778f-9cvs5 container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok
Dec 11 10:20:10 crc kubenswrapper[4663]: [+]log ok
Dec 11 10:20:10 crc kubenswrapper[4663]: [+]etcd ok
Dec 11 10:20:10 crc kubenswrapper[4663]: [+]poststarthook/start-apiserver-admission-initializer ok
Dec 11 10:20:10 crc kubenswrapper[4663]: [+]poststarthook/generic-apiserver-start-informers ok
Dec 11 10:20:10 crc kubenswrapper[4663]: [+]poststarthook/max-in-flight-filter ok
Dec 11 10:20:10 crc kubenswrapper[4663]: [+]poststarthook/storage-object-count-tracker-hook ok
Dec 11 10:20:10 crc kubenswrapper[4663]: [+]poststarthook/image.openshift.io-apiserver-caches ok
Dec 11 10:20:10 crc kubenswrapper[4663]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld
Dec 11 10:20:10 crc kubenswrapper[4663]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld
Dec 11 10:20:10 crc kubenswrapper[4663]: [+]poststarthook/project.openshift.io-projectcache ok
Dec 11 10:20:10 crc kubenswrapper[4663]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok
Dec 11 10:20:10 crc kubenswrapper[4663]: [+]poststarthook/openshift.io-startinformers ok
Dec 11 10:20:10 crc kubenswrapper[4663]: [+]poststarthook/openshift.io-restmapperupdater ok
Dec 11 10:20:10 crc kubenswrapper[4663]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok
Dec 11 10:20:10 crc kubenswrapper[4663]: livez check failed
Dec 11 10:20:10 crc kubenswrapper[4663]: I1211 10:20:10.009173 4663 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-9cvs5" podUID="26fa94f3-6f45-4641-a0b5-b027b7db54c3" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 11 10:20:10 crc kubenswrapper[4663]: I1211 10:20:10.086416 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 11 10:20:10 crc kubenswrapper[4663]: E1211 10:20:10.087102 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 10:20:10.587078744 +0000 UTC m=+141.405008528 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 10:20:10 crc kubenswrapper[4663]: I1211 10:20:10.190695 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc"
Dec 11 10:20:10 crc kubenswrapper[4663]: E1211 10:20:10.191228 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 10:20:10.691215468 +0000 UTC m=+141.509145252 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mxhbc" (UID: "62062293-e497-4499-b728-226260dc0ae1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 10:20:10 crc kubenswrapper[4663]: I1211 10:20:10.293835 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 11 10:20:10 crc kubenswrapper[4663]: E1211 10:20:10.294211 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 10:20:10.794196082 +0000 UTC m=+141.612125865 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 10:20:10 crc kubenswrapper[4663]: I1211 10:20:10.374340 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mlbtj" event={"ID":"3b63171f-dadd-465c-98c6-0197a59336a9","Type":"ContainerStarted","Data":"e80892a168182d6cd7cfc7ecb53605d2b4e3389551bb6eaaaa54ef1996ba4819"}
Dec 11 10:20:10 crc kubenswrapper[4663]: I1211 10:20:10.395294 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc"
Dec 11 10:20:10 crc kubenswrapper[4663]: E1211 10:20:10.395786 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 10:20:10.895774348 +0000 UTC m=+141.713704132 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mxhbc" (UID: "62062293-e497-4499-b728-226260dc0ae1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 10:20:10 crc kubenswrapper[4663]: I1211 10:20:10.432938 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-9nsfq" event={"ID":"47f9ce8e-00cf-4b1f-9a90-816f9eb7338f","Type":"ContainerStarted","Data":"c5cfa728dde8d62cba27f5af8b632c3311d4c345e9c9940bcb0519f3096f7db8"}
Dec 11 10:20:10 crc kubenswrapper[4663]: I1211 10:20:10.432977 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-9nsfq" event={"ID":"47f9ce8e-00cf-4b1f-9a90-816f9eb7338f","Type":"ContainerStarted","Data":"f96358a1d99929e26271c1b313de51782aa0212099b661b81e11a44f06189434"}
Dec 11 10:20:10 crc kubenswrapper[4663]: I1211 10:20:10.451986 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-p4fsq"]
Dec 11 10:20:10 crc kubenswrapper[4663]: I1211 10:20:10.460103 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-24p95"
Dec 11 10:20:10 crc kubenswrapper[4663]: I1211 10:20:10.460223 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hfmnw"
Dec 11 10:20:10 crc kubenswrapper[4663]: I1211 10:20:10.461846 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ql2bc"]
"operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 10:20:10 crc kubenswrapper[4663]: E1211 10:20:10.497992 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 10:20:10.997980002 +0000 UTC m=+141.815909786 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 10:20:10 crc kubenswrapper[4663]: I1211 10:20:10.557448 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fdgt7"] Dec 11 10:20:10 crc kubenswrapper[4663]: I1211 10:20:10.598758 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" Dec 11 10:20:10 crc kubenswrapper[4663]: E1211 10:20:10.599328 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 10:20:11.099317535 +0000 UTC m=+141.917247319 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mxhbc" (UID: "62062293-e497-4499-b728-226260dc0ae1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 10:20:10 crc kubenswrapper[4663]: I1211 10:20:10.645410 4663 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Dec 11 10:20:10 crc kubenswrapper[4663]: I1211 10:20:10.702821 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 10:20:10 crc kubenswrapper[4663]: E1211 10:20:10.703271 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-11 10:20:11.203255442 +0000 UTC m=+142.021185226 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 10:20:10 crc kubenswrapper[4663]: I1211 10:20:10.805133 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" Dec 11 10:20:10 crc kubenswrapper[4663]: E1211 10:20:10.805592 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 10:20:11.305578025 +0000 UTC m=+142.123507809 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mxhbc" (UID: "62062293-e497-4499-b728-226260dc0ae1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 10:20:10 crc kubenswrapper[4663]: I1211 10:20:10.905933 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 10:20:10 crc kubenswrapper[4663]: E1211 10:20:10.906513 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 10:20:11.406494094 +0000 UTC m=+142.224423878 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 10:20:10 crc kubenswrapper[4663]: I1211 10:20:10.976908 4663 patch_prober.go:28] interesting pod/router-default-5444994796-n9pwg container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 10:20:10 crc kubenswrapper[4663]: [-]has-synced failed: reason withheld Dec 11 10:20:10 crc kubenswrapper[4663]: [+]process-running ok Dec 11 10:20:10 crc kubenswrapper[4663]: healthz check failed Dec 11 10:20:10 crc kubenswrapper[4663]: I1211 10:20:10.976977 4663 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-n9pwg" podUID="3a676c5e-7428-4f37-9aa2-0755611c6ff3" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.008385 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" Dec 11 10:20:11 crc kubenswrapper[4663]: E1211 10:20:11.008761 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 10:20:11.508746494 +0000 UTC m=+142.326676278 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mxhbc" (UID: "62062293-e497-4499-b728-226260dc0ae1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.109381 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 10:20:11 crc kubenswrapper[4663]: E1211 10:20:11.109545 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 10:20:11.609513056 +0000 UTC m=+142.427442840 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.109815 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" Dec 11 10:20:11 crc kubenswrapper[4663]: E1211 10:20:11.110209 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 10:20:11.610199144 +0000 UTC m=+142.428128927 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mxhbc" (UID: "62062293-e497-4499-b728-226260dc0ae1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.174851 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-cblmm"] Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.175984 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cblmm" Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.178317 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.184170 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-cblmm"] Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.210980 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 10:20:11 crc kubenswrapper[4663]: E1211 10:20:11.211120 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 10:20:11.711095211 +0000 UTC m=+142.529024995 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.211213 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4abc7b2-958e-41fb-9bae-b8dd427179e2-utilities\") pod \"redhat-marketplace-cblmm\" (UID: \"b4abc7b2-958e-41fb-9bae-b8dd427179e2\") " pod="openshift-marketplace/redhat-marketplace-cblmm" Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.211255 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4abc7b2-958e-41fb-9bae-b8dd427179e2-catalog-content\") pod \"redhat-marketplace-cblmm\" (UID: \"b4abc7b2-958e-41fb-9bae-b8dd427179e2\") " pod="openshift-marketplace/redhat-marketplace-cblmm" Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.211395 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xpccm\" (UniqueName: \"kubernetes.io/projected/b4abc7b2-958e-41fb-9bae-b8dd427179e2-kube-api-access-xpccm\") pod \"redhat-marketplace-cblmm\" (UID: \"b4abc7b2-958e-41fb-9bae-b8dd427179e2\") " pod="openshift-marketplace/redhat-marketplace-cblmm" Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.211442 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" Dec 11 10:20:11 crc kubenswrapper[4663]: E1211 10:20:11.211812 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 10:20:11.711799075 +0000 UTC m=+142.529728849 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mxhbc" (UID: "62062293-e497-4499-b728-226260dc0ae1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.313052 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 11 10:20:11 crc kubenswrapper[4663]: E1211 10:20:11.313354 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 10:20:11.813309864 +0000 UTC m=+142.631239648 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.313556 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4abc7b2-958e-41fb-9bae-b8dd427179e2-utilities\") pod \"redhat-marketplace-cblmm\" (UID: \"b4abc7b2-958e-41fb-9bae-b8dd427179e2\") " pod="openshift-marketplace/redhat-marketplace-cblmm"
Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.313634 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4abc7b2-958e-41fb-9bae-b8dd427179e2-catalog-content\") pod \"redhat-marketplace-cblmm\" (UID: \"b4abc7b2-958e-41fb-9bae-b8dd427179e2\") " pod="openshift-marketplace/redhat-marketplace-cblmm"
Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.313807 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xpccm\" (UniqueName: \"kubernetes.io/projected/b4abc7b2-958e-41fb-9bae-b8dd427179e2-kube-api-access-xpccm\") pod \"redhat-marketplace-cblmm\" (UID: \"b4abc7b2-958e-41fb-9bae-b8dd427179e2\") " pod="openshift-marketplace/redhat-marketplace-cblmm"
Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.313855 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc"
Dec 11 10:20:11 crc kubenswrapper[4663]: E1211 10:20:11.314286 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8
podName: nodeName:}" failed. No retries permitted until 2025-12-11 10:20:11.814270013 +0000 UTC m=+142.632199796 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mxhbc" (UID: "62062293-e497-4499-b728-226260dc0ae1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.314698 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4abc7b2-958e-41fb-9bae-b8dd427179e2-utilities\") pod \"redhat-marketplace-cblmm\" (UID: \"b4abc7b2-958e-41fb-9bae-b8dd427179e2\") " pod="openshift-marketplace/redhat-marketplace-cblmm" Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.314923 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4abc7b2-958e-41fb-9bae-b8dd427179e2-catalog-content\") pod \"redhat-marketplace-cblmm\" (UID: \"b4abc7b2-958e-41fb-9bae-b8dd427179e2\") " pod="openshift-marketplace/redhat-marketplace-cblmm" Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.337689 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xpccm\" (UniqueName: \"kubernetes.io/projected/b4abc7b2-958e-41fb-9bae-b8dd427179e2-kube-api-access-xpccm\") pod \"redhat-marketplace-cblmm\" (UID: \"b4abc7b2-958e-41fb-9bae-b8dd427179e2\") " pod="openshift-marketplace/redhat-marketplace-cblmm" Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.415730 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 10:20:11 crc kubenswrapper[4663]: E1211 10:20:11.416041 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 10:20:11.916010772 +0000 UTC m=+142.733940557 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.416440 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc"
Dec 11 10:20:11 crc kubenswrapper[4663]: E1211 10:20:11.416795 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 10:20:11.916787216 +0000 UTC m=+142.734716999 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mxhbc" (UID: "62062293-e497-4499-b728-226260dc0ae1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.439745 4663 generic.go:334] "Generic (PLEG): container finished" podID="3b63171f-dadd-465c-98c6-0197a59336a9" containerID="616eb56f042616855a8b1acc6bc564c965fec62d8e8dcb55a2d1cc4bab59e4b6" exitCode=0
Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.439837 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mlbtj" event={"ID":"3b63171f-dadd-465c-98c6-0197a59336a9","Type":"ContainerDied","Data":"616eb56f042616855a8b1acc6bc564c965fec62d8e8dcb55a2d1cc4bab59e4b6"}
Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.441917 4663 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.443517 4663 generic.go:334] "Generic (PLEG): container finished" podID="a0d64b13-fea1-428a-a013-5f48ee4b93b7" containerID="3ec7e8b731488832a0705cee56412e80179708266614250c3a1228b848fda0c3" exitCode=0
Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.443962 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fdgt7" event={"ID":"a0d64b13-fea1-428a-a013-5f48ee4b93b7","Type":"ContainerDied","Data":"3ec7e8b731488832a0705cee56412e80179708266614250c3a1228b848fda0c3"}
Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.444009 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fdgt7" event={"ID":"a0d64b13-fea1-428a-a013-5f48ee4b93b7","Type":"ContainerStarted","Data":"04294fc73a86f6d073a184a76c7074e05d4c1411515d10ef1932a9c848f3029d"}
Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.446551 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-9nsfq"
event={"ID":"47f9ce8e-00cf-4b1f-9a90-816f9eb7338f","Type":"ContainerStarted","Data":"ac7692c42e924ca8925a9032db63b84217b82146058095729b3a5b9e69a6d13c"} Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.447998 4663 generic.go:334] "Generic (PLEG): container finished" podID="8f822dbf-f355-45f9-b293-f1ab29619ed7" containerID="3c2bbae7fab8c3c96ddfa5b95e85778053eef81b80831de9512d539e4eed0bbd" exitCode=0 Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.448071 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p4fsq" event={"ID":"8f822dbf-f355-45f9-b293-f1ab29619ed7","Type":"ContainerDied","Data":"3c2bbae7fab8c3c96ddfa5b95e85778053eef81b80831de9512d539e4eed0bbd"} Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.448090 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p4fsq" event={"ID":"8f822dbf-f355-45f9-b293-f1ab29619ed7","Type":"ContainerStarted","Data":"3e90f4614ee3644de5f25432cf2fc9e0bb8ca73c59c2bb7eb230069ea917fb64"} Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.449958 4663 generic.go:334] "Generic (PLEG): container finished" podID="c6c8ac96-598d-4a04-845d-84ed9cf435cf" containerID="693ef8a18f38188ebf4eff0b016b5322e8320f0fb612cdb17f85cfca376d0e08" exitCode=0 Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.451111 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ql2bc" event={"ID":"c6c8ac96-598d-4a04-845d-84ed9cf435cf","Type":"ContainerDied","Data":"693ef8a18f38188ebf4eff0b016b5322e8320f0fb612cdb17f85cfca376d0e08"} Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.451158 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ql2bc" event={"ID":"c6c8ac96-598d-4a04-845d-84ed9cf435cf","Type":"ContainerStarted","Data":"c785ab1054e29c2b2759936fb016b80b0a97dc30b74af9505080e50c14163350"} Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.500472 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cblmm" Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.503160 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-9nsfq" podStartSLOduration=9.50314149 podStartE2EDuration="9.50314149s" podCreationTimestamp="2025-12-11 10:20:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:20:11.500370128 +0000 UTC m=+142.318299912" watchObservedRunningTime="2025-12-11 10:20:11.50314149 +0000 UTC m=+142.321071275" Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.518244 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 10:20:11 crc kubenswrapper[4663]: E1211 10:20:11.519274 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 10:20:12.019252451 +0000 UTC m=+142.837182236 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.579287 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-tcx5r"]
Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.582056 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tcx5r"
Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.597454 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tcx5r"]
Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.620650 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sbdpw\" (UniqueName: \"kubernetes.io/projected/d71b3f6f-1e97-4471-a823-4e04a0e0c684-kube-api-access-sbdpw\") pod \"redhat-marketplace-tcx5r\" (UID: \"d71b3f6f-1e97-4471-a823-4e04a0e0c684\") " pod="openshift-marketplace/redhat-marketplace-tcx5r"
Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.620706 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d71b3f6f-1e97-4471-a823-4e04a0e0c684-utilities\") pod \"redhat-marketplace-tcx5r\" (UID: \"d71b3f6f-1e97-4471-a823-4e04a0e0c684\") " pod="openshift-marketplace/redhat-marketplace-tcx5r"
Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.620781 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc"
Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.620985 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d71b3f6f-1e97-4471-a823-4e04a0e0c684-catalog-content\") pod \"redhat-marketplace-tcx5r\" (UID: \"d71b3f6f-1e97-4471-a823-4e04a0e0c684\") " pod="openshift-marketplace/redhat-marketplace-tcx5r"
Dec 11 10:20:11 crc kubenswrapper[4663]: E1211 10:20:11.621063 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 10:20:12.121049928 +0000 UTC m=+142.938979713 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mxhbc" (UID: "62062293-e497-4499-b728-226260dc0ae1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.643321 4663 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-12-11T10:20:10.645447228Z","Handler":null,"Name":""}
Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.649994 4663 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0
Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.650082 4663 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock
Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.661990 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-cblmm"]
Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.722579 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.722952 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d71b3f6f-1e97-4471-a823-4e04a0e0c684-catalog-content\") pod \"redhat-marketplace-tcx5r\" (UID: \"d71b3f6f-1e97-4471-a823-4e04a0e0c684\") " pod="openshift-marketplace/redhat-marketplace-tcx5r"
Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.723033 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sbdpw\" (UniqueName: \"kubernetes.io/projected/d71b3f6f-1e97-4471-a823-4e04a0e0c684-kube-api-access-sbdpw\") pod \"redhat-marketplace-tcx5r\" (UID: \"d71b3f6f-1e97-4471-a823-4e04a0e0c684\") " pod="openshift-marketplace/redhat-marketplace-tcx5r"
Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.723063 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d71b3f6f-1e97-4471-a823-4e04a0e0c684-utilities\") pod \"redhat-marketplace-tcx5r\" (UID: \"d71b3f6f-1e97-4471-a823-4e04a0e0c684\") " pod="openshift-marketplace/redhat-marketplace-tcx5r"
Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.723456 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d71b3f6f-1e97-4471-a823-4e04a0e0c684-catalog-content\") pod \"redhat-marketplace-tcx5r\" (UID: \"d71b3f6f-1e97-4471-a823-4e04a0e0c684\") " pod="openshift-marketplace/redhat-marketplace-tcx5r"
Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.724389 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName:
\"kubernetes.io/empty-dir/d71b3f6f-1e97-4471-a823-4e04a0e0c684-utilities\") pod \"redhat-marketplace-tcx5r\" (UID: \"d71b3f6f-1e97-4471-a823-4e04a0e0c684\") " pod="openshift-marketplace/redhat-marketplace-tcx5r" Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.726771 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.739725 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sbdpw\" (UniqueName: \"kubernetes.io/projected/d71b3f6f-1e97-4471-a823-4e04a0e0c684-kube-api-access-sbdpw\") pod \"redhat-marketplace-tcx5r\" (UID: \"d71b3f6f-1e97-4471-a823-4e04a0e0c684\") " pod="openshift-marketplace/redhat-marketplace-tcx5r" Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.825045 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.827963 4663 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.827997 4663 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.846749 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mxhbc\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") " pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.905581 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tcx5r" Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.976639 4663 patch_prober.go:28] interesting pod/router-default-5444994796-n9pwg container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 10:20:11 crc kubenswrapper[4663]: [-]has-synced failed: reason withheld Dec 11 10:20:11 crc kubenswrapper[4663]: [+]process-running ok Dec 11 10:20:11 crc kubenswrapper[4663]: healthz check failed Dec 11 10:20:11 crc kubenswrapper[4663]: I1211 10:20:11.976701 4663 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-n9pwg" podUID="3a676c5e-7428-4f37-9aa2-0755611c6ff3" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 10:20:12 crc kubenswrapper[4663]: I1211 10:20:12.048896 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tcx5r"] Dec 11 10:20:12 crc kubenswrapper[4663]: W1211 10:20:12.054078 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd71b3f6f_1e97_4471_a823_4e04a0e0c684.slice/crio-673faf7eee2c2ce57656ee92f1840baea0d9ecc95e4d08142432fe3d8429979b WatchSource:0}: Error finding container 673faf7eee2c2ce57656ee92f1840baea0d9ecc95e4d08142432fe3d8429979b: Status 404 returned error can't find the container with id 673faf7eee2c2ce57656ee92f1840baea0d9ecc95e4d08142432fe3d8429979b Dec 11 10:20:12 crc kubenswrapper[4663]: I1211 10:20:12.144918 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" Dec 11 10:20:12 crc kubenswrapper[4663]: I1211 10:20:12.175398 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-zwkwc"] Dec 11 10:20:12 crc kubenswrapper[4663]: I1211 10:20:12.176578 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-zwkwc" Dec 11 10:20:12 crc kubenswrapper[4663]: I1211 10:20:12.178005 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 11 10:20:12 crc kubenswrapper[4663]: I1211 10:20:12.186159 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zwkwc"] Dec 11 10:20:12 crc kubenswrapper[4663]: I1211 10:20:12.233251 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ds4bk\" (UniqueName: \"kubernetes.io/projected/2d163437-adb7-4c91-b925-72de9fe71120-kube-api-access-ds4bk\") pod \"redhat-operators-zwkwc\" (UID: \"2d163437-adb7-4c91-b925-72de9fe71120\") " pod="openshift-marketplace/redhat-operators-zwkwc" Dec 11 10:20:12 crc kubenswrapper[4663]: I1211 10:20:12.233462 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2d163437-adb7-4c91-b925-72de9fe71120-utilities\") pod \"redhat-operators-zwkwc\" (UID: \"2d163437-adb7-4c91-b925-72de9fe71120\") " pod="openshift-marketplace/redhat-operators-zwkwc" Dec 11 10:20:12 crc kubenswrapper[4663]: I1211 10:20:12.233551 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2d163437-adb7-4c91-b925-72de9fe71120-catalog-content\") pod \"redhat-operators-zwkwc\" (UID: \"2d163437-adb7-4c91-b925-72de9fe71120\") " pod="openshift-marketplace/redhat-operators-zwkwc" Dec 11 10:20:12 crc kubenswrapper[4663]: I1211 10:20:12.336802 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2d163437-adb7-4c91-b925-72de9fe71120-catalog-content\") pod \"redhat-operators-zwkwc\" (UID: \"2d163437-adb7-4c91-b925-72de9fe71120\") " pod="openshift-marketplace/redhat-operators-zwkwc" Dec 11 10:20:12 crc kubenswrapper[4663]: I1211 10:20:12.336919 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ds4bk\" (UniqueName: \"kubernetes.io/projected/2d163437-adb7-4c91-b925-72de9fe71120-kube-api-access-ds4bk\") pod \"redhat-operators-zwkwc\" (UID: \"2d163437-adb7-4c91-b925-72de9fe71120\") " pod="openshift-marketplace/redhat-operators-zwkwc" Dec 11 10:20:12 crc kubenswrapper[4663]: I1211 10:20:12.336990 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2d163437-adb7-4c91-b925-72de9fe71120-utilities\") pod \"redhat-operators-zwkwc\" (UID: \"2d163437-adb7-4c91-b925-72de9fe71120\") " pod="openshift-marketplace/redhat-operators-zwkwc" Dec 11 10:20:12 crc kubenswrapper[4663]: I1211 10:20:12.337356 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2d163437-adb7-4c91-b925-72de9fe71120-catalog-content\") pod \"redhat-operators-zwkwc\" (UID: \"2d163437-adb7-4c91-b925-72de9fe71120\") " pod="openshift-marketplace/redhat-operators-zwkwc" Dec 11 10:20:12 crc kubenswrapper[4663]: I1211 10:20:12.337441 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2d163437-adb7-4c91-b925-72de9fe71120-utilities\") pod \"redhat-operators-zwkwc\" (UID: \"2d163437-adb7-4c91-b925-72de9fe71120\") " 
pod="openshift-marketplace/redhat-operators-zwkwc" Dec 11 10:20:12 crc kubenswrapper[4663]: I1211 10:20:12.356539 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ds4bk\" (UniqueName: \"kubernetes.io/projected/2d163437-adb7-4c91-b925-72de9fe71120-kube-api-access-ds4bk\") pod \"redhat-operators-zwkwc\" (UID: \"2d163437-adb7-4c91-b925-72de9fe71120\") " pod="openshift-marketplace/redhat-operators-zwkwc" Dec 11 10:20:12 crc kubenswrapper[4663]: I1211 10:20:12.465961 4663 generic.go:334] "Generic (PLEG): container finished" podID="b15591c3-541e-4daf-8e21-9b69db5cf260" containerID="28a4a8a3f9c056c7f5d084e5abddd28f327fad286bc30422a8154a3036396db5" exitCode=0 Dec 11 10:20:12 crc kubenswrapper[4663]: I1211 10:20:12.466028 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424135-v2t6r" event={"ID":"b15591c3-541e-4daf-8e21-9b69db5cf260","Type":"ContainerDied","Data":"28a4a8a3f9c056c7f5d084e5abddd28f327fad286bc30422a8154a3036396db5"} Dec 11 10:20:12 crc kubenswrapper[4663]: I1211 10:20:12.470170 4663 generic.go:334] "Generic (PLEG): container finished" podID="b4abc7b2-958e-41fb-9bae-b8dd427179e2" containerID="ded69ee3b1c21490ba5c44288cc79dec1c8dad1868c89baac459c59098c0b15b" exitCode=0 Dec 11 10:20:12 crc kubenswrapper[4663]: I1211 10:20:12.470318 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cblmm" event={"ID":"b4abc7b2-958e-41fb-9bae-b8dd427179e2","Type":"ContainerDied","Data":"ded69ee3b1c21490ba5c44288cc79dec1c8dad1868c89baac459c59098c0b15b"} Dec 11 10:20:12 crc kubenswrapper[4663]: I1211 10:20:12.470383 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cblmm" event={"ID":"b4abc7b2-958e-41fb-9bae-b8dd427179e2","Type":"ContainerStarted","Data":"e089992e85b0c452ff2404ea8576fa96b2cfee6a82c5084c7d0d83fb561157e0"} Dec 11 10:20:12 crc kubenswrapper[4663]: I1211 10:20:12.471898 4663 generic.go:334] "Generic (PLEG): container finished" podID="d71b3f6f-1e97-4471-a823-4e04a0e0c684" containerID="b7f021f191a98349a7a1591861195a3e6b946609f0a54d7af02ce72b032d177a" exitCode=0 Dec 11 10:20:12 crc kubenswrapper[4663]: I1211 10:20:12.471995 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tcx5r" event={"ID":"d71b3f6f-1e97-4471-a823-4e04a0e0c684","Type":"ContainerDied","Data":"b7f021f191a98349a7a1591861195a3e6b946609f0a54d7af02ce72b032d177a"} Dec 11 10:20:12 crc kubenswrapper[4663]: I1211 10:20:12.472034 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tcx5r" event={"ID":"d71b3f6f-1e97-4471-a823-4e04a0e0c684","Type":"ContainerStarted","Data":"673faf7eee2c2ce57656ee92f1840baea0d9ecc95e4d08142432fe3d8429979b"} Dec 11 10:20:12 crc kubenswrapper[4663]: I1211 10:20:12.517040 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-mxhbc"] Dec 11 10:20:12 crc kubenswrapper[4663]: W1211 10:20:12.527663 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod62062293_e497_4499_b728_226260dc0ae1.slice/crio-a0cff407b9879f0f99547db5d34349d1bc163b9c806b95dc8aa4ac2f5e0510cf WatchSource:0}: Error finding container a0cff407b9879f0f99547db5d34349d1bc163b9c806b95dc8aa4ac2f5e0510cf: Status 404 returned error can't find the container with id 
a0cff407b9879f0f99547db5d34349d1bc163b9c806b95dc8aa4ac2f5e0510cf
Dec 11 10:20:12 crc kubenswrapper[4663]: I1211 10:20:12.557487 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zwkwc"
Dec 11 10:20:12 crc kubenswrapper[4663]: I1211 10:20:12.593826 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-xlbqp"]
Dec 11 10:20:12 crc kubenswrapper[4663]: I1211 10:20:12.595196 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xlbqp"
Dec 11 10:20:12 crc kubenswrapper[4663]: I1211 10:20:12.605099 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xlbqp"]
Dec 11 10:20:12 crc kubenswrapper[4663]: I1211 10:20:12.642561 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dqltw\" (UniqueName: \"kubernetes.io/projected/d2fb168f-d6a2-4800-bf80-01e7167cea09-kube-api-access-dqltw\") pod \"redhat-operators-xlbqp\" (UID: \"d2fb168f-d6a2-4800-bf80-01e7167cea09\") " pod="openshift-marketplace/redhat-operators-xlbqp"
Dec 11 10:20:12 crc kubenswrapper[4663]: I1211 10:20:12.642760 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2fb168f-d6a2-4800-bf80-01e7167cea09-catalog-content\") pod \"redhat-operators-xlbqp\" (UID: \"d2fb168f-d6a2-4800-bf80-01e7167cea09\") " pod="openshift-marketplace/redhat-operators-xlbqp"
Dec 11 10:20:12 crc kubenswrapper[4663]: I1211 10:20:12.643523 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d2fb168f-d6a2-4800-bf80-01e7167cea09-utilities\") pod \"redhat-operators-xlbqp\" (UID: \"d2fb168f-d6a2-4800-bf80-01e7167cea09\") " pod="openshift-marketplace/redhat-operators-xlbqp"
Dec 11 10:20:12 crc kubenswrapper[4663]: I1211 10:20:12.745447 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d2fb168f-d6a2-4800-bf80-01e7167cea09-utilities\") pod \"redhat-operators-xlbqp\" (UID: \"d2fb168f-d6a2-4800-bf80-01e7167cea09\") " pod="openshift-marketplace/redhat-operators-xlbqp"
Dec 11 10:20:12 crc kubenswrapper[4663]: I1211 10:20:12.745565 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dqltw\" (UniqueName: \"kubernetes.io/projected/d2fb168f-d6a2-4800-bf80-01e7167cea09-kube-api-access-dqltw\") pod \"redhat-operators-xlbqp\" (UID: \"d2fb168f-d6a2-4800-bf80-01e7167cea09\") " pod="openshift-marketplace/redhat-operators-xlbqp"
Dec 11 10:20:12 crc kubenswrapper[4663]: I1211 10:20:12.745693 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2fb168f-d6a2-4800-bf80-01e7167cea09-catalog-content\") pod \"redhat-operators-xlbqp\" (UID: \"d2fb168f-d6a2-4800-bf80-01e7167cea09\") " pod="openshift-marketplace/redhat-operators-xlbqp"
Dec 11 10:20:12 crc kubenswrapper[4663]: I1211 10:20:12.746116 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2fb168f-d6a2-4800-bf80-01e7167cea09-catalog-content\") pod \"redhat-operators-xlbqp\" (UID: \"d2fb168f-d6a2-4800-bf80-01e7167cea09\") "
pod="openshift-marketplace/redhat-operators-xlbqp" Dec 11 10:20:12 crc kubenswrapper[4663]: I1211 10:20:12.746635 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d2fb168f-d6a2-4800-bf80-01e7167cea09-utilities\") pod \"redhat-operators-xlbqp\" (UID: \"d2fb168f-d6a2-4800-bf80-01e7167cea09\") " pod="openshift-marketplace/redhat-operators-xlbqp" Dec 11 10:20:12 crc kubenswrapper[4663]: I1211 10:20:12.769525 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dqltw\" (UniqueName: \"kubernetes.io/projected/d2fb168f-d6a2-4800-bf80-01e7167cea09-kube-api-access-dqltw\") pod \"redhat-operators-xlbqp\" (UID: \"d2fb168f-d6a2-4800-bf80-01e7167cea09\") " pod="openshift-marketplace/redhat-operators-xlbqp" Dec 11 10:20:12 crc kubenswrapper[4663]: I1211 10:20:12.924552 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xlbqp" Dec 11 10:20:12 crc kubenswrapper[4663]: I1211 10:20:12.980666 4663 patch_prober.go:28] interesting pod/router-default-5444994796-n9pwg container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 10:20:12 crc kubenswrapper[4663]: [-]has-synced failed: reason withheld Dec 11 10:20:12 crc kubenswrapper[4663]: [+]process-running ok Dec 11 10:20:12 crc kubenswrapper[4663]: healthz check failed Dec 11 10:20:12 crc kubenswrapper[4663]: I1211 10:20:12.980793 4663 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-n9pwg" podUID="3a676c5e-7428-4f37-9aa2-0755611c6ff3" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 10:20:13 crc kubenswrapper[4663]: I1211 10:20:13.010748 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zwkwc"] Dec 11 10:20:13 crc kubenswrapper[4663]: W1211 10:20:13.045553 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2d163437_adb7_4c91_b925_72de9fe71120.slice/crio-1c0ba465bd0a58bea91ab523ddef6169a1eac7ffabea7adcc3ff73e51dfb4780 WatchSource:0}: Error finding container 1c0ba465bd0a58bea91ab523ddef6169a1eac7ffabea7adcc3ff73e51dfb4780: Status 404 returned error can't find the container with id 1c0ba465bd0a58bea91ab523ddef6169a1eac7ffabea7adcc3ff73e51dfb4780 Dec 11 10:20:13 crc kubenswrapper[4663]: I1211 10:20:13.207494 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xlbqp"] Dec 11 10:20:13 crc kubenswrapper[4663]: I1211 10:20:13.270423 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 11 10:20:13 crc kubenswrapper[4663]: I1211 10:20:13.272604 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 11 10:20:13 crc kubenswrapper[4663]: I1211 10:20:13.274252 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Dec 11 10:20:13 crc kubenswrapper[4663]: I1211 10:20:13.274560 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Dec 11 10:20:13 crc kubenswrapper[4663]: I1211 10:20:13.277520 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 11 10:20:13 crc kubenswrapper[4663]: I1211 10:20:13.356302 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/29412828-843a-414f-acc0-c130aa563203-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"29412828-843a-414f-acc0-c130aa563203\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 11 10:20:13 crc kubenswrapper[4663]: I1211 10:20:13.356351 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/29412828-843a-414f-acc0-c130aa563203-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"29412828-843a-414f-acc0-c130aa563203\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 11 10:20:13 crc kubenswrapper[4663]: I1211 10:20:13.457395 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/29412828-843a-414f-acc0-c130aa563203-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"29412828-843a-414f-acc0-c130aa563203\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 11 10:20:13 crc kubenswrapper[4663]: I1211 10:20:13.457460 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/29412828-843a-414f-acc0-c130aa563203-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"29412828-843a-414f-acc0-c130aa563203\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 11 10:20:13 crc kubenswrapper[4663]: I1211 10:20:13.457841 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/29412828-843a-414f-acc0-c130aa563203-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"29412828-843a-414f-acc0-c130aa563203\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 11 10:20:13 crc kubenswrapper[4663]: I1211 10:20:13.475214 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/29412828-843a-414f-acc0-c130aa563203-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"29412828-843a-414f-acc0-c130aa563203\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 11 10:20:13 crc kubenswrapper[4663]: I1211 10:20:13.481656 4663 generic.go:334] "Generic (PLEG): container finished" podID="d2fb168f-d6a2-4800-bf80-01e7167cea09" containerID="90929084e76cbe40ab0af0b2f59c9103c99644ed21c5a0d5ebc4ab9200bd8131" exitCode=0 Dec 11 10:20:13 crc kubenswrapper[4663]: I1211 10:20:13.481733 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xlbqp" 
event={"ID":"d2fb168f-d6a2-4800-bf80-01e7167cea09","Type":"ContainerDied","Data":"90929084e76cbe40ab0af0b2f59c9103c99644ed21c5a0d5ebc4ab9200bd8131"} Dec 11 10:20:13 crc kubenswrapper[4663]: I1211 10:20:13.481841 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xlbqp" event={"ID":"d2fb168f-d6a2-4800-bf80-01e7167cea09","Type":"ContainerStarted","Data":"fd55aad026e7f012d4476e8641e2d1ef6d522241b6e6d27806f2dd2b3fcdcb2c"} Dec 11 10:20:13 crc kubenswrapper[4663]: I1211 10:20:13.485516 4663 generic.go:334] "Generic (PLEG): container finished" podID="2d163437-adb7-4c91-b925-72de9fe71120" containerID="03a5adacffb6b69d3c7f0448489abe42ed6828ee2e2524aa761d2d3e924a20b2" exitCode=0 Dec 11 10:20:13 crc kubenswrapper[4663]: I1211 10:20:13.485583 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zwkwc" event={"ID":"2d163437-adb7-4c91-b925-72de9fe71120","Type":"ContainerDied","Data":"03a5adacffb6b69d3c7f0448489abe42ed6828ee2e2524aa761d2d3e924a20b2"} Dec 11 10:20:13 crc kubenswrapper[4663]: I1211 10:20:13.485602 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zwkwc" event={"ID":"2d163437-adb7-4c91-b925-72de9fe71120","Type":"ContainerStarted","Data":"1c0ba465bd0a58bea91ab523ddef6169a1eac7ffabea7adcc3ff73e51dfb4780"} Dec 11 10:20:13 crc kubenswrapper[4663]: I1211 10:20:13.488324 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" event={"ID":"62062293-e497-4499-b728-226260dc0ae1","Type":"ContainerStarted","Data":"c91f81e461b2d76472d0aa7b28b5281c66994943f75f682906c7e27a750cb196"} Dec 11 10:20:13 crc kubenswrapper[4663]: I1211 10:20:13.488349 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" event={"ID":"62062293-e497-4499-b728-226260dc0ae1","Type":"ContainerStarted","Data":"a0cff407b9879f0f99547db5d34349d1bc163b9c806b95dc8aa4ac2f5e0510cf"} Dec 11 10:20:13 crc kubenswrapper[4663]: I1211 10:20:13.488361 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" Dec 11 10:20:13 crc kubenswrapper[4663]: I1211 10:20:13.523013 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" podStartSLOduration=126.522953008 podStartE2EDuration="2m6.522953008s" podCreationTimestamp="2025-12-11 10:18:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:20:13.520694848 +0000 UTC m=+144.338624622" watchObservedRunningTime="2025-12-11 10:20:13.522953008 +0000 UTC m=+144.340882792" Dec 11 10:20:13 crc kubenswrapper[4663]: I1211 10:20:13.523918 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Dec 11 10:20:13 crc kubenswrapper[4663]: I1211 10:20:13.650225 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 11 10:20:13 crc kubenswrapper[4663]: I1211 10:20:13.678037 4663 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424135-v2t6r" Dec 11 10:20:13 crc kubenswrapper[4663]: I1211 10:20:13.762007 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dwbrw\" (UniqueName: \"kubernetes.io/projected/b15591c3-541e-4daf-8e21-9b69db5cf260-kube-api-access-dwbrw\") pod \"b15591c3-541e-4daf-8e21-9b69db5cf260\" (UID: \"b15591c3-541e-4daf-8e21-9b69db5cf260\") " Dec 11 10:20:13 crc kubenswrapper[4663]: I1211 10:20:13.762089 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b15591c3-541e-4daf-8e21-9b69db5cf260-secret-volume\") pod \"b15591c3-541e-4daf-8e21-9b69db5cf260\" (UID: \"b15591c3-541e-4daf-8e21-9b69db5cf260\") " Dec 11 10:20:13 crc kubenswrapper[4663]: I1211 10:20:13.762151 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b15591c3-541e-4daf-8e21-9b69db5cf260-config-volume\") pod \"b15591c3-541e-4daf-8e21-9b69db5cf260\" (UID: \"b15591c3-541e-4daf-8e21-9b69db5cf260\") " Dec 11 10:20:13 crc kubenswrapper[4663]: I1211 10:20:13.763881 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b15591c3-541e-4daf-8e21-9b69db5cf260-config-volume" (OuterVolumeSpecName: "config-volume") pod "b15591c3-541e-4daf-8e21-9b69db5cf260" (UID: "b15591c3-541e-4daf-8e21-9b69db5cf260"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:20:13 crc kubenswrapper[4663]: I1211 10:20:13.767357 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b15591c3-541e-4daf-8e21-9b69db5cf260-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "b15591c3-541e-4daf-8e21-9b69db5cf260" (UID: "b15591c3-541e-4daf-8e21-9b69db5cf260"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:20:13 crc kubenswrapper[4663]: I1211 10:20:13.768672 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b15591c3-541e-4daf-8e21-9b69db5cf260-kube-api-access-dwbrw" (OuterVolumeSpecName: "kube-api-access-dwbrw") pod "b15591c3-541e-4daf-8e21-9b69db5cf260" (UID: "b15591c3-541e-4daf-8e21-9b69db5cf260"). InnerVolumeSpecName "kube-api-access-dwbrw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:20:13 crc kubenswrapper[4663]: I1211 10:20:13.786790 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-hvrvb" Dec 11 10:20:13 crc kubenswrapper[4663]: I1211 10:20:13.864360 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dwbrw\" (UniqueName: \"kubernetes.io/projected/b15591c3-541e-4daf-8e21-9b69db5cf260-kube-api-access-dwbrw\") on node \"crc\" DevicePath \"\"" Dec 11 10:20:13 crc kubenswrapper[4663]: I1211 10:20:13.864389 4663 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b15591c3-541e-4daf-8e21-9b69db5cf260-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 11 10:20:13 crc kubenswrapper[4663]: I1211 10:20:13.864400 4663 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b15591c3-541e-4daf-8e21-9b69db5cf260-config-volume\") on node \"crc\" DevicePath \"\"" Dec 11 10:20:13 crc kubenswrapper[4663]: I1211 10:20:13.894177 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 11 10:20:13 crc kubenswrapper[4663]: I1211 10:20:13.976670 4663 patch_prober.go:28] interesting pod/router-default-5444994796-n9pwg container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 10:20:13 crc kubenswrapper[4663]: [-]has-synced failed: reason withheld Dec 11 10:20:13 crc kubenswrapper[4663]: [+]process-running ok Dec 11 10:20:13 crc kubenswrapper[4663]: healthz check failed Dec 11 10:20:13 crc kubenswrapper[4663]: I1211 10:20:13.976802 4663 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-n9pwg" podUID="3a676c5e-7428-4f37-9aa2-0755611c6ff3" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 10:20:14 crc kubenswrapper[4663]: I1211 10:20:14.502860 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"29412828-843a-414f-acc0-c130aa563203","Type":"ContainerStarted","Data":"5e36c6277044d6897b9c9f450242ab2d315aced5b9fef5a410f06980e164e625"} Dec 11 10:20:14 crc kubenswrapper[4663]: I1211 10:20:14.502909 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"29412828-843a-414f-acc0-c130aa563203","Type":"ContainerStarted","Data":"ace43be2942f23baf001a5e4e5f73dc692321a06874193fd321c17e4ad8e7a7c"} Dec 11 10:20:14 crc kubenswrapper[4663]: I1211 10:20:14.507994 4663 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424135-v2t6r" Dec 11 10:20:14 crc kubenswrapper[4663]: I1211 10:20:14.508318 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424135-v2t6r" event={"ID":"b15591c3-541e-4daf-8e21-9b69db5cf260","Type":"ContainerDied","Data":"eaa9c4f3b063825b6b6c5bc0975726bc9bbf7677c5558f74f59304063587137f"} Dec 11 10:20:14 crc kubenswrapper[4663]: I1211 10:20:14.508338 4663 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eaa9c4f3b063825b6b6c5bc0975726bc9bbf7677c5558f74f59304063587137f" Dec 11 10:20:14 crc kubenswrapper[4663]: I1211 10:20:14.521404 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-9cvs5" Dec 11 10:20:14 crc kubenswrapper[4663]: I1211 10:20:14.524795 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-9cvs5" Dec 11 10:20:14 crc kubenswrapper[4663]: I1211 10:20:14.537476 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=1.537458661 podStartE2EDuration="1.537458661s" podCreationTimestamp="2025-12-11 10:20:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:20:14.519280601 +0000 UTC m=+145.337210404" watchObservedRunningTime="2025-12-11 10:20:14.537458661 +0000 UTC m=+145.355388445" Dec 11 10:20:14 crc kubenswrapper[4663]: I1211 10:20:14.578377 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:20:14 crc kubenswrapper[4663]: I1211 10:20:14.578485 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:20:14 crc kubenswrapper[4663]: I1211 10:20:14.578548 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:20:14 crc kubenswrapper[4663]: I1211 10:20:14.580492 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:20:14 crc kubenswrapper[4663]: I1211 10:20:14.594796 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: 
\"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:20:14 crc kubenswrapper[4663]: I1211 10:20:14.606554 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:20:14 crc kubenswrapper[4663]: I1211 10:20:14.621580 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 10:20:14 crc kubenswrapper[4663]: I1211 10:20:14.681027 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:20:14 crc kubenswrapper[4663]: I1211 10:20:14.684775 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:20:14 crc kubenswrapper[4663]: I1211 10:20:14.716410 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 10:20:14 crc kubenswrapper[4663]: I1211 10:20:14.928125 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:20:14 crc kubenswrapper[4663]: I1211 10:20:14.940811 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-mn2pm" Dec 11 10:20:14 crc kubenswrapper[4663]: I1211 10:20:14.976630 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-n9pwg" Dec 11 10:20:14 crc kubenswrapper[4663]: I1211 10:20:14.979136 4663 patch_prober.go:28] interesting pod/router-default-5444994796-n9pwg container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 10:20:14 crc kubenswrapper[4663]: [-]has-synced failed: reason withheld Dec 11 10:20:14 crc kubenswrapper[4663]: [+]process-running ok Dec 11 10:20:14 crc kubenswrapper[4663]: healthz check failed Dec 11 10:20:14 crc kubenswrapper[4663]: I1211 10:20:14.979167 4663 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-n9pwg" podUID="3a676c5e-7428-4f37-9aa2-0755611c6ff3" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 10:20:15 crc kubenswrapper[4663]: W1211 10:20:15.225961 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b6479f0_333b_4a96_9adf_2099afdc2447.slice/crio-04a317e303d69cd56ff85da33c59de8abbfc08f1f6e76caaf3bb6e008e214442 WatchSource:0}: Error finding container 04a317e303d69cd56ff85da33c59de8abbfc08f1f6e76caaf3bb6e008e214442: Status 404 returned error can't find the container with id 04a317e303d69cd56ff85da33c59de8abbfc08f1f6e76caaf3bb6e008e214442 Dec 11 10:20:15 crc kubenswrapper[4663]: I1211 10:20:15.234333 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-dc8cp" Dec 11 10:20:15 crc kubenswrapper[4663]: I1211 10:20:15.234788 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-dc8cp" Dec 11 10:20:15 crc kubenswrapper[4663]: I1211 10:20:15.235566 4663 patch_prober.go:28] interesting pod/console-f9d7485db-dc8cp container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.14:8443/health\": dial tcp 10.217.0.14:8443: connect: connection refused" start-of-body= Dec 11 10:20:15 crc kubenswrapper[4663]: I1211 10:20:15.235623 4663 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-dc8cp" podUID="d18be0a7-2aaa-4aaa-9d37-f44453e994d8" containerName="console" probeResult="failure" output="Get \"https://10.217.0.14:8443/health\": dial tcp 10.217.0.14:8443: connect: connection refused" Dec 11 10:20:15 crc kubenswrapper[4663]: I1211 10:20:15.514750 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"976773aca80e83097f2d036e2c3c9034472fc8c82e5139eb3f34b200c753a3d1"} Dec 11 10:20:15 crc kubenswrapper[4663]: I1211 10:20:15.515115 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" 
event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"4b6aca9c45783303511b196878497eb2bd8b67478820f5462fb67ae6dfe6c669"} Dec 11 10:20:15 crc kubenswrapper[4663]: I1211 10:20:15.516961 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"bea7b0c5748ecd0e03bda5cebe84886fddb75f5721bb605c9d30b694b57fbf4d"} Dec 11 10:20:15 crc kubenswrapper[4663]: I1211 10:20:15.516980 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"d1963537a58f76845e4c24996eb18ad0dc35f677a04648a944c828f343c1a52d"} Dec 11 10:20:15 crc kubenswrapper[4663]: I1211 10:20:15.522080 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"34ba51dcb43a9e83c559cecc9178e77f601625bac7e9a8c1b3e72e182f8cb753"} Dec 11 10:20:15 crc kubenswrapper[4663]: I1211 10:20:15.522098 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"04a317e303d69cd56ff85da33c59de8abbfc08f1f6e76caaf3bb6e008e214442"} Dec 11 10:20:15 crc kubenswrapper[4663]: I1211 10:20:15.522410 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:20:15 crc kubenswrapper[4663]: I1211 10:20:15.523741 4663 generic.go:334] "Generic (PLEG): container finished" podID="29412828-843a-414f-acc0-c130aa563203" containerID="5e36c6277044d6897b9c9f450242ab2d315aced5b9fef5a410f06980e164e625" exitCode=0 Dec 11 10:20:15 crc kubenswrapper[4663]: I1211 10:20:15.524110 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"29412828-843a-414f-acc0-c130aa563203","Type":"ContainerDied","Data":"5e36c6277044d6897b9c9f450242ab2d315aced5b9fef5a410f06980e164e625"} Dec 11 10:20:15 crc kubenswrapper[4663]: I1211 10:20:15.753318 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 11 10:20:15 crc kubenswrapper[4663]: E1211 10:20:15.753610 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b15591c3-541e-4daf-8e21-9b69db5cf260" containerName="collect-profiles" Dec 11 10:20:15 crc kubenswrapper[4663]: I1211 10:20:15.753624 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="b15591c3-541e-4daf-8e21-9b69db5cf260" containerName="collect-profiles" Dec 11 10:20:15 crc kubenswrapper[4663]: I1211 10:20:15.753736 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="b15591c3-541e-4daf-8e21-9b69db5cf260" containerName="collect-profiles" Dec 11 10:20:15 crc kubenswrapper[4663]: I1211 10:20:15.754332 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 11 10:20:15 crc kubenswrapper[4663]: I1211 10:20:15.756810 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 11 10:20:15 crc kubenswrapper[4663]: I1211 10:20:15.756845 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 11 10:20:15 crc kubenswrapper[4663]: I1211 10:20:15.788242 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 11 10:20:15 crc kubenswrapper[4663]: I1211 10:20:15.809870 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/671fd0cb-6807-4c8d-9c42-64c5dcc1ad2b-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"671fd0cb-6807-4c8d-9c42-64c5dcc1ad2b\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 11 10:20:15 crc kubenswrapper[4663]: I1211 10:20:15.809916 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/671fd0cb-6807-4c8d-9c42-64c5dcc1ad2b-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"671fd0cb-6807-4c8d-9c42-64c5dcc1ad2b\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 11 10:20:15 crc kubenswrapper[4663]: I1211 10:20:15.911325 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/671fd0cb-6807-4c8d-9c42-64c5dcc1ad2b-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"671fd0cb-6807-4c8d-9c42-64c5dcc1ad2b\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 11 10:20:15 crc kubenswrapper[4663]: I1211 10:20:15.911370 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/671fd0cb-6807-4c8d-9c42-64c5dcc1ad2b-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"671fd0cb-6807-4c8d-9c42-64c5dcc1ad2b\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 11 10:20:15 crc kubenswrapper[4663]: I1211 10:20:15.911683 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/671fd0cb-6807-4c8d-9c42-64c5dcc1ad2b-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"671fd0cb-6807-4c8d-9c42-64c5dcc1ad2b\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 11 10:20:15 crc kubenswrapper[4663]: I1211 10:20:15.933469 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/671fd0cb-6807-4c8d-9c42-64c5dcc1ad2b-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"671fd0cb-6807-4c8d-9c42-64c5dcc1ad2b\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 11 10:20:15 crc kubenswrapper[4663]: I1211 10:20:15.979333 4663 patch_prober.go:28] interesting pod/router-default-5444994796-n9pwg container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 10:20:15 crc kubenswrapper[4663]: [-]has-synced failed: reason withheld Dec 11 10:20:15 crc kubenswrapper[4663]: [+]process-running ok Dec 11 10:20:15 crc kubenswrapper[4663]: healthz check failed Dec 11 10:20:15 crc kubenswrapper[4663]: I1211 10:20:15.979392 4663 
prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-n9pwg" podUID="3a676c5e-7428-4f37-9aa2-0755611c6ff3" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 10:20:16 crc kubenswrapper[4663]: I1211 10:20:16.078104 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 11 10:20:16 crc kubenswrapper[4663]: I1211 10:20:16.975796 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-n9pwg" Dec 11 10:20:16 crc kubenswrapper[4663]: I1211 10:20:16.978229 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-n9pwg" Dec 11 10:20:17 crc kubenswrapper[4663]: I1211 10:20:17.781009 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-vm8rp" Dec 11 10:20:19 crc kubenswrapper[4663]: I1211 10:20:19.271126 4663 patch_prober.go:28] interesting pod/machine-config-daemon-mq524 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 10:20:19 crc kubenswrapper[4663]: I1211 10:20:19.271430 4663 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 10:20:19 crc kubenswrapper[4663]: I1211 10:20:19.359301 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 11 10:20:19 crc kubenswrapper[4663]: I1211 10:20:19.384631 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/29412828-843a-414f-acc0-c130aa563203-kube-api-access\") pod \"29412828-843a-414f-acc0-c130aa563203\" (UID: \"29412828-843a-414f-acc0-c130aa563203\") " Dec 11 10:20:19 crc kubenswrapper[4663]: I1211 10:20:19.384666 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/29412828-843a-414f-acc0-c130aa563203-kubelet-dir\") pod \"29412828-843a-414f-acc0-c130aa563203\" (UID: \"29412828-843a-414f-acc0-c130aa563203\") " Dec 11 10:20:19 crc kubenswrapper[4663]: I1211 10:20:19.384815 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/29412828-843a-414f-acc0-c130aa563203-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "29412828-843a-414f-acc0-c130aa563203" (UID: "29412828-843a-414f-acc0-c130aa563203"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 10:20:19 crc kubenswrapper[4663]: I1211 10:20:19.385140 4663 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/29412828-843a-414f-acc0-c130aa563203-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 11 10:20:19 crc kubenswrapper[4663]: I1211 10:20:19.390523 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29412828-843a-414f-acc0-c130aa563203-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "29412828-843a-414f-acc0-c130aa563203" (UID: "29412828-843a-414f-acc0-c130aa563203"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:20:19 crc kubenswrapper[4663]: I1211 10:20:19.486168 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/29412828-843a-414f-acc0-c130aa563203-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 11 10:20:19 crc kubenswrapper[4663]: I1211 10:20:19.571588 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"29412828-843a-414f-acc0-c130aa563203","Type":"ContainerDied","Data":"ace43be2942f23baf001a5e4e5f73dc692321a06874193fd321c17e4ad8e7a7c"} Dec 11 10:20:19 crc kubenswrapper[4663]: I1211 10:20:19.571629 4663 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ace43be2942f23baf001a5e4e5f73dc692321a06874193fd321c17e4ad8e7a7c" Dec 11 10:20:19 crc kubenswrapper[4663]: I1211 10:20:19.571648 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 11 10:20:22 crc kubenswrapper[4663]: I1211 10:20:22.377409 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 11 10:20:25 crc kubenswrapper[4663]: I1211 10:20:25.239919 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-dc8cp" Dec 11 10:20:25 crc kubenswrapper[4663]: I1211 10:20:25.243848 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-dc8cp" Dec 11 10:20:28 crc kubenswrapper[4663]: I1211 10:20:28.420104 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/54c64914-8d10-48bc-a755-1f7257400a32-metrics-certs\") pod \"network-metrics-daemon-5r5q6\" (UID: \"54c64914-8d10-48bc-a755-1f7257400a32\") " pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:20:28 crc kubenswrapper[4663]: I1211 10:20:28.426126 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/54c64914-8d10-48bc-a755-1f7257400a32-metrics-certs\") pod \"network-metrics-daemon-5r5q6\" (UID: \"54c64914-8d10-48bc-a755-1f7257400a32\") " pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:20:28 crc kubenswrapper[4663]: I1211 10:20:28.520997 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-5r5q6" Dec 11 10:20:28 crc kubenswrapper[4663]: I1211 10:20:28.636649 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"671fd0cb-6807-4c8d-9c42-64c5dcc1ad2b","Type":"ContainerStarted","Data":"a44c5ee28692f9043ef72de57c4d05fbeda18afba5d5a84ccc106682d94c6351"} Dec 11 10:20:32 crc kubenswrapper[4663]: I1211 10:20:32.150220 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" Dec 11 10:20:32 crc kubenswrapper[4663]: E1211 10:20:32.670499 4663 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 11 10:20:32 crc kubenswrapper[4663]: E1211 10:20:32.670901 4663 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-sbdpw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-tcx5r_openshift-marketplace(d71b3f6f-1e97-4471-a823-4e04a0e0c684): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 11 10:20:32 crc kubenswrapper[4663]: E1211 10:20:32.672042 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-tcx5r" podUID="d71b3f6f-1e97-4471-a823-4e04a0e0c684" Dec 11 10:20:33 crc kubenswrapper[4663]: E1211 10:20:33.936586 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" 
pod="openshift-marketplace/redhat-marketplace-tcx5r" podUID="d71b3f6f-1e97-4471-a823-4e04a0e0c684" Dec 11 10:20:35 crc kubenswrapper[4663]: I1211 10:20:35.472348 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-5r5q6"] Dec 11 10:20:35 crc kubenswrapper[4663]: I1211 10:20:35.690933 4663 generic.go:334] "Generic (PLEG): container finished" podID="b4abc7b2-958e-41fb-9bae-b8dd427179e2" containerID="e518db8bb39eb363a88b430652e007bb6f29a0c13c177b5af52047eb6615c06c" exitCode=0 Dec 11 10:20:35 crc kubenswrapper[4663]: I1211 10:20:35.691099 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cblmm" event={"ID":"b4abc7b2-958e-41fb-9bae-b8dd427179e2","Type":"ContainerDied","Data":"e518db8bb39eb363a88b430652e007bb6f29a0c13c177b5af52047eb6615c06c"} Dec 11 10:20:35 crc kubenswrapper[4663]: I1211 10:20:35.701705 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fdgt7" event={"ID":"a0d64b13-fea1-428a-a013-5f48ee4b93b7","Type":"ContainerStarted","Data":"41b5bd8a79b9634a399c08f4e6201b791d15a383a68d0450dc9dfee75b1b0ebc"} Dec 11 10:20:35 crc kubenswrapper[4663]: I1211 10:20:35.706353 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zwkwc" event={"ID":"2d163437-adb7-4c91-b925-72de9fe71120","Type":"ContainerStarted","Data":"85baf9b9531d2056d4d938214bebce24fa3783d64510209a7231e8271512f1dc"} Dec 11 10:20:35 crc kubenswrapper[4663]: I1211 10:20:35.711345 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ql2bc" event={"ID":"c6c8ac96-598d-4a04-845d-84ed9cf435cf","Type":"ContainerStarted","Data":"2c279a3bca5fcce5047b8902699ee59316640fa3a04ae11eb47c0ac2a34a2ac7"} Dec 11 10:20:35 crc kubenswrapper[4663]: I1211 10:20:35.715093 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mlbtj" event={"ID":"3b63171f-dadd-465c-98c6-0197a59336a9","Type":"ContainerStarted","Data":"6bbf25dad89e75e6a742bc74737f94dbb54578cf34f9c05fe5078f0570d665d6"} Dec 11 10:20:35 crc kubenswrapper[4663]: I1211 10:20:35.719294 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xlbqp" event={"ID":"d2fb168f-d6a2-4800-bf80-01e7167cea09","Type":"ContainerStarted","Data":"5a6c36c909dcf2d9be29d805b70b4b5738fbe014cbbfb3cc2f3449a1f08803d2"} Dec 11 10:20:35 crc kubenswrapper[4663]: I1211 10:20:35.726546 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p4fsq" event={"ID":"8f822dbf-f355-45f9-b293-f1ab29619ed7","Type":"ContainerStarted","Data":"f2bc57cc9591bd870efed63b1fd71c8fa88298d9f81fe5d08b35aa53dab76e37"} Dec 11 10:20:35 crc kubenswrapper[4663]: I1211 10:20:35.732736 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"671fd0cb-6807-4c8d-9c42-64c5dcc1ad2b","Type":"ContainerStarted","Data":"6118a1ec5543e3a6f9c24da3d31476aebb76e21d52531958da42555d048d487d"} Dec 11 10:20:35 crc kubenswrapper[4663]: I1211 10:20:35.734076 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-5r5q6" event={"ID":"54c64914-8d10-48bc-a755-1f7257400a32","Type":"ContainerStarted","Data":"149bc75da88f2da141bb2e410c284f85f3a56a411e1e3218e06008815efcd9fe"} Dec 11 10:20:35 crc kubenswrapper[4663]: I1211 10:20:35.811533 4663 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=20.811490931 podStartE2EDuration="20.811490931s" podCreationTimestamp="2025-12-11 10:20:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:20:35.809053994 +0000 UTC m=+166.626983768" watchObservedRunningTime="2025-12-11 10:20:35.811490931 +0000 UTC m=+166.629420714" Dec 11 10:20:36 crc kubenswrapper[4663]: I1211 10:20:36.742656 4663 generic.go:334] "Generic (PLEG): container finished" podID="2d163437-adb7-4c91-b925-72de9fe71120" containerID="85baf9b9531d2056d4d938214bebce24fa3783d64510209a7231e8271512f1dc" exitCode=0 Dec 11 10:20:36 crc kubenswrapper[4663]: I1211 10:20:36.742771 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zwkwc" event={"ID":"2d163437-adb7-4c91-b925-72de9fe71120","Type":"ContainerDied","Data":"85baf9b9531d2056d4d938214bebce24fa3783d64510209a7231e8271512f1dc"} Dec 11 10:20:36 crc kubenswrapper[4663]: I1211 10:20:36.746391 4663 generic.go:334] "Generic (PLEG): container finished" podID="671fd0cb-6807-4c8d-9c42-64c5dcc1ad2b" containerID="6118a1ec5543e3a6f9c24da3d31476aebb76e21d52531958da42555d048d487d" exitCode=0 Dec 11 10:20:36 crc kubenswrapper[4663]: I1211 10:20:36.746556 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"671fd0cb-6807-4c8d-9c42-64c5dcc1ad2b","Type":"ContainerDied","Data":"6118a1ec5543e3a6f9c24da3d31476aebb76e21d52531958da42555d048d487d"} Dec 11 10:20:36 crc kubenswrapper[4663]: I1211 10:20:36.750133 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cblmm" event={"ID":"b4abc7b2-958e-41fb-9bae-b8dd427179e2","Type":"ContainerStarted","Data":"d1214d98544cf6be7af6172ab70ca47b50dc60e1dac2b372499e6ec622c9524f"} Dec 11 10:20:36 crc kubenswrapper[4663]: I1211 10:20:36.755267 4663 generic.go:334] "Generic (PLEG): container finished" podID="3b63171f-dadd-465c-98c6-0197a59336a9" containerID="6bbf25dad89e75e6a742bc74737f94dbb54578cf34f9c05fe5078f0570d665d6" exitCode=0 Dec 11 10:20:36 crc kubenswrapper[4663]: I1211 10:20:36.755362 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mlbtj" event={"ID":"3b63171f-dadd-465c-98c6-0197a59336a9","Type":"ContainerDied","Data":"6bbf25dad89e75e6a742bc74737f94dbb54578cf34f9c05fe5078f0570d665d6"} Dec 11 10:20:36 crc kubenswrapper[4663]: I1211 10:20:36.757063 4663 generic.go:334] "Generic (PLEG): container finished" podID="d2fb168f-d6a2-4800-bf80-01e7167cea09" containerID="5a6c36c909dcf2d9be29d805b70b4b5738fbe014cbbfb3cc2f3449a1f08803d2" exitCode=0 Dec 11 10:20:36 crc kubenswrapper[4663]: I1211 10:20:36.757109 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xlbqp" event={"ID":"d2fb168f-d6a2-4800-bf80-01e7167cea09","Type":"ContainerDied","Data":"5a6c36c909dcf2d9be29d805b70b4b5738fbe014cbbfb3cc2f3449a1f08803d2"} Dec 11 10:20:36 crc kubenswrapper[4663]: I1211 10:20:36.760935 4663 generic.go:334] "Generic (PLEG): container finished" podID="a0d64b13-fea1-428a-a013-5f48ee4b93b7" containerID="41b5bd8a79b9634a399c08f4e6201b791d15a383a68d0450dc9dfee75b1b0ebc" exitCode=0 Dec 11 10:20:36 crc kubenswrapper[4663]: I1211 10:20:36.760994 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fdgt7" 
event={"ID":"a0d64b13-fea1-428a-a013-5f48ee4b93b7","Type":"ContainerDied","Data":"41b5bd8a79b9634a399c08f4e6201b791d15a383a68d0450dc9dfee75b1b0ebc"} Dec 11 10:20:36 crc kubenswrapper[4663]: I1211 10:20:36.764354 4663 generic.go:334] "Generic (PLEG): container finished" podID="8f822dbf-f355-45f9-b293-f1ab29619ed7" containerID="f2bc57cc9591bd870efed63b1fd71c8fa88298d9f81fe5d08b35aa53dab76e37" exitCode=0 Dec 11 10:20:36 crc kubenswrapper[4663]: I1211 10:20:36.764407 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p4fsq" event={"ID":"8f822dbf-f355-45f9-b293-f1ab29619ed7","Type":"ContainerDied","Data":"f2bc57cc9591bd870efed63b1fd71c8fa88298d9f81fe5d08b35aa53dab76e37"} Dec 11 10:20:36 crc kubenswrapper[4663]: I1211 10:20:36.767353 4663 generic.go:334] "Generic (PLEG): container finished" podID="c6c8ac96-598d-4a04-845d-84ed9cf435cf" containerID="2c279a3bca5fcce5047b8902699ee59316640fa3a04ae11eb47c0ac2a34a2ac7" exitCode=0 Dec 11 10:20:36 crc kubenswrapper[4663]: I1211 10:20:36.767657 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ql2bc" event={"ID":"c6c8ac96-598d-4a04-845d-84ed9cf435cf","Type":"ContainerDied","Data":"2c279a3bca5fcce5047b8902699ee59316640fa3a04ae11eb47c0ac2a34a2ac7"} Dec 11 10:20:36 crc kubenswrapper[4663]: I1211 10:20:36.769798 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-5r5q6" event={"ID":"54c64914-8d10-48bc-a755-1f7257400a32","Type":"ContainerStarted","Data":"f7a3a79411db0758eb0564dab5c901d13e80533e798b034d8e3084a0467900c3"} Dec 11 10:20:36 crc kubenswrapper[4663]: I1211 10:20:36.769824 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-5r5q6" event={"ID":"54c64914-8d10-48bc-a755-1f7257400a32","Type":"ContainerStarted","Data":"dd9fbf25c5ba3d19b9241881da1f9bd818dca0a9ffeab220232866ed82e101e1"} Dec 11 10:20:36 crc kubenswrapper[4663]: I1211 10:20:36.820381 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-cblmm" podStartSLOduration=2.007301061 podStartE2EDuration="25.82036647s" podCreationTimestamp="2025-12-11 10:20:11 +0000 UTC" firstStartedPulling="2025-12-11 10:20:12.474259944 +0000 UTC m=+143.292189728" lastFinishedPulling="2025-12-11 10:20:36.287325352 +0000 UTC m=+167.105255137" observedRunningTime="2025-12-11 10:20:36.818989824 +0000 UTC m=+167.636919608" watchObservedRunningTime="2025-12-11 10:20:36.82036647 +0000 UTC m=+167.638296254" Dec 11 10:20:36 crc kubenswrapper[4663]: I1211 10:20:36.848796 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-5r5q6" podStartSLOduration=150.84877775 podStartE2EDuration="2m30.84877775s" podCreationTimestamp="2025-12-11 10:18:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:20:36.843238022 +0000 UTC m=+167.661167797" watchObservedRunningTime="2025-12-11 10:20:36.84877775 +0000 UTC m=+167.666707534" Dec 11 10:20:37 crc kubenswrapper[4663]: I1211 10:20:37.775947 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p4fsq" event={"ID":"8f822dbf-f355-45f9-b293-f1ab29619ed7","Type":"ContainerStarted","Data":"50d8e84816e980eb349665cafcd9336f6e65d956f914b2470e8e0d56b4b2f585"} Dec 11 10:20:37 crc kubenswrapper[4663]: I1211 10:20:37.778811 4663 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ql2bc" event={"ID":"c6c8ac96-598d-4a04-845d-84ed9cf435cf","Type":"ContainerStarted","Data":"d1e45577fab147efceedc53b20a8a3c821006b170ec982f60d3a271159cad071"} Dec 11 10:20:37 crc kubenswrapper[4663]: I1211 10:20:37.782880 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xlbqp" event={"ID":"d2fb168f-d6a2-4800-bf80-01e7167cea09","Type":"ContainerStarted","Data":"f4f565ca8b8023d95b855ad11f140d56f1d811c19a61485fdfe95ab16db6f8ae"} Dec 11 10:20:37 crc kubenswrapper[4663]: I1211 10:20:37.788202 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mlbtj" event={"ID":"3b63171f-dadd-465c-98c6-0197a59336a9","Type":"ContainerStarted","Data":"674008b10c25f6ec7be1f81178a5bbb9a30dfb2c960f3943abbecedd370035f7"} Dec 11 10:20:37 crc kubenswrapper[4663]: I1211 10:20:37.792414 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fdgt7" event={"ID":"a0d64b13-fea1-428a-a013-5f48ee4b93b7","Type":"ContainerStarted","Data":"3e7575e5afdc139abd5b2fa51fb084a81efd0f406ea77db713ab5b7a83c3c0b9"} Dec 11 10:20:37 crc kubenswrapper[4663]: I1211 10:20:37.793249 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-p4fsq" podStartSLOduration=2.947896792 podStartE2EDuration="28.793232269s" podCreationTimestamp="2025-12-11 10:20:09 +0000 UTC" firstStartedPulling="2025-12-11 10:20:11.449065311 +0000 UTC m=+142.266995095" lastFinishedPulling="2025-12-11 10:20:37.294400788 +0000 UTC m=+168.112330572" observedRunningTime="2025-12-11 10:20:37.79253231 +0000 UTC m=+168.610462094" watchObservedRunningTime="2025-12-11 10:20:37.793232269 +0000 UTC m=+168.611162054" Dec 11 10:20:37 crc kubenswrapper[4663]: I1211 10:20:37.797801 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zwkwc" event={"ID":"2d163437-adb7-4c91-b925-72de9fe71120","Type":"ContainerStarted","Data":"fbe76f45b74391fcad78a171da9c6b358c0f3d07deba00d2cc7729fbc917138b"} Dec 11 10:20:37 crc kubenswrapper[4663]: I1211 10:20:37.808869 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-xlbqp" podStartSLOduration=1.9744275039999999 podStartE2EDuration="25.808839002s" podCreationTimestamp="2025-12-11 10:20:12 +0000 UTC" firstStartedPulling="2025-12-11 10:20:13.48379802 +0000 UTC m=+144.301727804" lastFinishedPulling="2025-12-11 10:20:37.318209518 +0000 UTC m=+168.136139302" observedRunningTime="2025-12-11 10:20:37.805575627 +0000 UTC m=+168.623505402" watchObservedRunningTime="2025-12-11 10:20:37.808839002 +0000 UTC m=+168.626768786" Dec 11 10:20:37 crc kubenswrapper[4663]: I1211 10:20:37.822361 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-mlbtj" podStartSLOduration=3.688138654 podStartE2EDuration="29.822344843s" podCreationTimestamp="2025-12-11 10:20:08 +0000 UTC" firstStartedPulling="2025-12-11 10:20:11.441489936 +0000 UTC m=+142.259419720" lastFinishedPulling="2025-12-11 10:20:37.575696135 +0000 UTC m=+168.393625909" observedRunningTime="2025-12-11 10:20:37.818644444 +0000 UTC m=+168.636574228" watchObservedRunningTime="2025-12-11 10:20:37.822344843 +0000 UTC m=+168.640274627" Dec 11 10:20:37 crc kubenswrapper[4663]: I1211 10:20:37.841228 4663 pod_startup_latency_tracker.go:104] 
"Observed pod startup duration" pod="openshift-marketplace/certified-operators-fdgt7" podStartSLOduration=3.017683138 podStartE2EDuration="28.841206393s" podCreationTimestamp="2025-12-11 10:20:09 +0000 UTC" firstStartedPulling="2025-12-11 10:20:11.444798421 +0000 UTC m=+142.262728205" lastFinishedPulling="2025-12-11 10:20:37.268321676 +0000 UTC m=+168.086251460" observedRunningTime="2025-12-11 10:20:37.83930651 +0000 UTC m=+168.657236295" watchObservedRunningTime="2025-12-11 10:20:37.841206393 +0000 UTC m=+168.659136177" Dec 11 10:20:37 crc kubenswrapper[4663]: I1211 10:20:37.872066 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-ql2bc" podStartSLOduration=2.952817708 podStartE2EDuration="28.872048458s" podCreationTimestamp="2025-12-11 10:20:09 +0000 UTC" firstStartedPulling="2025-12-11 10:20:11.452298812 +0000 UTC m=+142.270228597" lastFinishedPulling="2025-12-11 10:20:37.371529564 +0000 UTC m=+168.189459347" observedRunningTime="2025-12-11 10:20:37.868414144 +0000 UTC m=+168.686343927" watchObservedRunningTime="2025-12-11 10:20:37.872048458 +0000 UTC m=+168.689978242" Dec 11 10:20:37 crc kubenswrapper[4663]: I1211 10:20:37.883142 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-zwkwc" podStartSLOduration=1.8821821399999998 podStartE2EDuration="25.883128633s" podCreationTimestamp="2025-12-11 10:20:12 +0000 UTC" firstStartedPulling="2025-12-11 10:20:13.486951798 +0000 UTC m=+144.304881581" lastFinishedPulling="2025-12-11 10:20:37.487898289 +0000 UTC m=+168.305828074" observedRunningTime="2025-12-11 10:20:37.882519234 +0000 UTC m=+168.700449018" watchObservedRunningTime="2025-12-11 10:20:37.883128633 +0000 UTC m=+168.701058418" Dec 11 10:20:37 crc kubenswrapper[4663]: I1211 10:20:37.998990 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 11 10:20:38 crc kubenswrapper[4663]: I1211 10:20:38.065256 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/671fd0cb-6807-4c8d-9c42-64c5dcc1ad2b-kubelet-dir\") pod \"671fd0cb-6807-4c8d-9c42-64c5dcc1ad2b\" (UID: \"671fd0cb-6807-4c8d-9c42-64c5dcc1ad2b\") " Dec 11 10:20:38 crc kubenswrapper[4663]: I1211 10:20:38.065392 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/671fd0cb-6807-4c8d-9c42-64c5dcc1ad2b-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "671fd0cb-6807-4c8d-9c42-64c5dcc1ad2b" (UID: "671fd0cb-6807-4c8d-9c42-64c5dcc1ad2b"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 10:20:38 crc kubenswrapper[4663]: I1211 10:20:38.065440 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/671fd0cb-6807-4c8d-9c42-64c5dcc1ad2b-kube-api-access\") pod \"671fd0cb-6807-4c8d-9c42-64c5dcc1ad2b\" (UID: \"671fd0cb-6807-4c8d-9c42-64c5dcc1ad2b\") " Dec 11 10:20:38 crc kubenswrapper[4663]: I1211 10:20:38.065770 4663 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/671fd0cb-6807-4c8d-9c42-64c5dcc1ad2b-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 11 10:20:38 crc kubenswrapper[4663]: I1211 10:20:38.072287 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/671fd0cb-6807-4c8d-9c42-64c5dcc1ad2b-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "671fd0cb-6807-4c8d-9c42-64c5dcc1ad2b" (UID: "671fd0cb-6807-4c8d-9c42-64c5dcc1ad2b"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:20:38 crc kubenswrapper[4663]: I1211 10:20:38.166596 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/671fd0cb-6807-4c8d-9c42-64c5dcc1ad2b-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 11 10:20:38 crc kubenswrapper[4663]: I1211 10:20:38.804430 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 11 10:20:38 crc kubenswrapper[4663]: I1211 10:20:38.804447 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"671fd0cb-6807-4c8d-9c42-64c5dcc1ad2b","Type":"ContainerDied","Data":"a44c5ee28692f9043ef72de57c4d05fbeda18afba5d5a84ccc106682d94c6351"} Dec 11 10:20:38 crc kubenswrapper[4663]: I1211 10:20:38.804881 4663 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a44c5ee28692f9043ef72de57c4d05fbeda18afba5d5a84ccc106682d94c6351" Dec 11 10:20:39 crc kubenswrapper[4663]: I1211 10:20:39.312223 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-mlbtj" Dec 11 10:20:39 crc kubenswrapper[4663]: I1211 10:20:39.312296 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-mlbtj" Dec 11 10:20:39 crc kubenswrapper[4663]: I1211 10:20:39.511776 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-p4fsq" Dec 11 10:20:39 crc kubenswrapper[4663]: I1211 10:20:39.511818 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-p4fsq" Dec 11 10:20:39 crc kubenswrapper[4663]: I1211 10:20:39.549136 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-p4fsq" Dec 11 10:20:39 crc kubenswrapper[4663]: I1211 10:20:39.795395 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-ql2bc" Dec 11 10:20:39 crc kubenswrapper[4663]: I1211 10:20:39.795452 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-ql2bc" Dec 11 10:20:39 crc kubenswrapper[4663]: I1211 10:20:39.830062 4663 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-ql2bc" Dec 11 10:20:39 crc kubenswrapper[4663]: I1211 10:20:39.998389 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-fdgt7" Dec 11 10:20:39 crc kubenswrapper[4663]: I1211 10:20:39.998444 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-fdgt7" Dec 11 10:20:40 crc kubenswrapper[4663]: I1211 10:20:40.029078 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-fdgt7" Dec 11 10:20:40 crc kubenswrapper[4663]: I1211 10:20:40.382887 4663 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-mlbtj" podUID="3b63171f-dadd-465c-98c6-0197a59336a9" containerName="registry-server" probeResult="failure" output=< Dec 11 10:20:40 crc kubenswrapper[4663]: timeout: failed to connect service ":50051" within 1s Dec 11 10:20:40 crc kubenswrapper[4663]: > Dec 11 10:20:41 crc kubenswrapper[4663]: I1211 10:20:41.501458 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-cblmm" Dec 11 10:20:41 crc kubenswrapper[4663]: I1211 10:20:41.501775 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-cblmm" Dec 11 10:20:41 crc kubenswrapper[4663]: I1211 10:20:41.539092 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-cblmm" Dec 11 10:20:41 crc kubenswrapper[4663]: I1211 10:20:41.855424 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-cblmm" Dec 11 10:20:42 crc kubenswrapper[4663]: I1211 10:20:42.558312 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-zwkwc" Dec 11 10:20:42 crc kubenswrapper[4663]: I1211 10:20:42.558493 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-zwkwc" Dec 11 10:20:42 crc kubenswrapper[4663]: I1211 10:20:42.925495 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-xlbqp" Dec 11 10:20:42 crc kubenswrapper[4663]: I1211 10:20:42.925545 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-xlbqp" Dec 11 10:20:42 crc kubenswrapper[4663]: I1211 10:20:42.958440 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-xlbqp" Dec 11 10:20:43 crc kubenswrapper[4663]: I1211 10:20:43.592321 4663 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-zwkwc" podUID="2d163437-adb7-4c91-b925-72de9fe71120" containerName="registry-server" probeResult="failure" output=< Dec 11 10:20:43 crc kubenswrapper[4663]: timeout: failed to connect service ":50051" within 1s Dec 11 10:20:43 crc kubenswrapper[4663]: > Dec 11 10:20:43 crc kubenswrapper[4663]: I1211 10:20:43.862745 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-xlbqp" Dec 11 10:20:44 crc kubenswrapper[4663]: I1211 10:20:44.814149 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-jnb74" Dec 11 10:20:45 crc kubenswrapper[4663]: I1211 10:20:45.843416 4663 generic.go:334] "Generic (PLEG): container finished" podID="d71b3f6f-1e97-4471-a823-4e04a0e0c684" containerID="143250c64b6fa123d2969c1441b4c30f976d37fb9c9bfed4698cb09aad30ce31" exitCode=0 Dec 11 10:20:45 crc kubenswrapper[4663]: I1211 10:20:45.843703 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tcx5r" event={"ID":"d71b3f6f-1e97-4471-a823-4e04a0e0c684","Type":"ContainerDied","Data":"143250c64b6fa123d2969c1441b4c30f976d37fb9c9bfed4698cb09aad30ce31"} Dec 11 10:20:46 crc kubenswrapper[4663]: I1211 10:20:46.118324 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-xlbqp"] Dec 11 10:20:46 crc kubenswrapper[4663]: I1211 10:20:46.118585 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-xlbqp" podUID="d2fb168f-d6a2-4800-bf80-01e7167cea09" containerName="registry-server" containerID="cri-o://f4f565ca8b8023d95b855ad11f140d56f1d811c19a61485fdfe95ab16db6f8ae" gracePeriod=2 Dec 11 10:20:46 crc kubenswrapper[4663]: I1211 10:20:46.413710 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xlbqp" Dec 11 10:20:46 crc kubenswrapper[4663]: I1211 10:20:46.478667 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2fb168f-d6a2-4800-bf80-01e7167cea09-catalog-content\") pod \"d2fb168f-d6a2-4800-bf80-01e7167cea09\" (UID: \"d2fb168f-d6a2-4800-bf80-01e7167cea09\") " Dec 11 10:20:46 crc kubenswrapper[4663]: I1211 10:20:46.478710 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d2fb168f-d6a2-4800-bf80-01e7167cea09-utilities\") pod \"d2fb168f-d6a2-4800-bf80-01e7167cea09\" (UID: \"d2fb168f-d6a2-4800-bf80-01e7167cea09\") " Dec 11 10:20:46 crc kubenswrapper[4663]: I1211 10:20:46.478807 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dqltw\" (UniqueName: \"kubernetes.io/projected/d2fb168f-d6a2-4800-bf80-01e7167cea09-kube-api-access-dqltw\") pod \"d2fb168f-d6a2-4800-bf80-01e7167cea09\" (UID: \"d2fb168f-d6a2-4800-bf80-01e7167cea09\") " Dec 11 10:20:46 crc kubenswrapper[4663]: I1211 10:20:46.479715 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d2fb168f-d6a2-4800-bf80-01e7167cea09-utilities" (OuterVolumeSpecName: "utilities") pod "d2fb168f-d6a2-4800-bf80-01e7167cea09" (UID: "d2fb168f-d6a2-4800-bf80-01e7167cea09"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:20:46 crc kubenswrapper[4663]: I1211 10:20:46.484085 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d2fb168f-d6a2-4800-bf80-01e7167cea09-kube-api-access-dqltw" (OuterVolumeSpecName: "kube-api-access-dqltw") pod "d2fb168f-d6a2-4800-bf80-01e7167cea09" (UID: "d2fb168f-d6a2-4800-bf80-01e7167cea09"). InnerVolumeSpecName "kube-api-access-dqltw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:20:46 crc kubenswrapper[4663]: I1211 10:20:46.579941 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d2fb168f-d6a2-4800-bf80-01e7167cea09-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d2fb168f-d6a2-4800-bf80-01e7167cea09" (UID: "d2fb168f-d6a2-4800-bf80-01e7167cea09"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:20:46 crc kubenswrapper[4663]: I1211 10:20:46.580493 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2fb168f-d6a2-4800-bf80-01e7167cea09-catalog-content\") pod \"d2fb168f-d6a2-4800-bf80-01e7167cea09\" (UID: \"d2fb168f-d6a2-4800-bf80-01e7167cea09\") " Dec 11 10:20:46 crc kubenswrapper[4663]: W1211 10:20:46.580636 4663 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/d2fb168f-d6a2-4800-bf80-01e7167cea09/volumes/kubernetes.io~empty-dir/catalog-content Dec 11 10:20:46 crc kubenswrapper[4663]: I1211 10:20:46.580681 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d2fb168f-d6a2-4800-bf80-01e7167cea09-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d2fb168f-d6a2-4800-bf80-01e7167cea09" (UID: "d2fb168f-d6a2-4800-bf80-01e7167cea09"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:20:46 crc kubenswrapper[4663]: I1211 10:20:46.580821 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dqltw\" (UniqueName: \"kubernetes.io/projected/d2fb168f-d6a2-4800-bf80-01e7167cea09-kube-api-access-dqltw\") on node \"crc\" DevicePath \"\"" Dec 11 10:20:46 crc kubenswrapper[4663]: I1211 10:20:46.580840 4663 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2fb168f-d6a2-4800-bf80-01e7167cea09-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 10:20:46 crc kubenswrapper[4663]: I1211 10:20:46.580857 4663 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d2fb168f-d6a2-4800-bf80-01e7167cea09-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 10:20:46 crc kubenswrapper[4663]: I1211 10:20:46.851405 4663 generic.go:334] "Generic (PLEG): container finished" podID="d2fb168f-d6a2-4800-bf80-01e7167cea09" containerID="f4f565ca8b8023d95b855ad11f140d56f1d811c19a61485fdfe95ab16db6f8ae" exitCode=0 Dec 11 10:20:46 crc kubenswrapper[4663]: I1211 10:20:46.851524 4663 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-xlbqp" Dec 11 10:20:46 crc kubenswrapper[4663]: I1211 10:20:46.851524 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xlbqp" event={"ID":"d2fb168f-d6a2-4800-bf80-01e7167cea09","Type":"ContainerDied","Data":"f4f565ca8b8023d95b855ad11f140d56f1d811c19a61485fdfe95ab16db6f8ae"} Dec 11 10:20:46 crc kubenswrapper[4663]: I1211 10:20:46.851759 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xlbqp" event={"ID":"d2fb168f-d6a2-4800-bf80-01e7167cea09","Type":"ContainerDied","Data":"fd55aad026e7f012d4476e8641e2d1ef6d522241b6e6d27806f2dd2b3fcdcb2c"} Dec 11 10:20:46 crc kubenswrapper[4663]: I1211 10:20:46.851821 4663 scope.go:117] "RemoveContainer" containerID="f4f565ca8b8023d95b855ad11f140d56f1d811c19a61485fdfe95ab16db6f8ae" Dec 11 10:20:46 crc kubenswrapper[4663]: I1211 10:20:46.856241 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tcx5r" event={"ID":"d71b3f6f-1e97-4471-a823-4e04a0e0c684","Type":"ContainerStarted","Data":"5955958be1e2c76665289b9bc8ac75c96f4b22a7d2ad4459b3cc8a5f5e183775"} Dec 11 10:20:46 crc kubenswrapper[4663]: I1211 10:20:46.871064 4663 scope.go:117] "RemoveContainer" containerID="5a6c36c909dcf2d9be29d805b70b4b5738fbe014cbbfb3cc2f3449a1f08803d2" Dec 11 10:20:46 crc kubenswrapper[4663]: I1211 10:20:46.874083 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-tcx5r" podStartSLOduration=1.902444187 podStartE2EDuration="35.874057421s" podCreationTimestamp="2025-12-11 10:20:11 +0000 UTC" firstStartedPulling="2025-12-11 10:20:12.474519467 +0000 UTC m=+143.292449251" lastFinishedPulling="2025-12-11 10:20:46.446132691 +0000 UTC m=+177.264062485" observedRunningTime="2025-12-11 10:20:46.872487611 +0000 UTC m=+177.690417396" watchObservedRunningTime="2025-12-11 10:20:46.874057421 +0000 UTC m=+177.691987206" Dec 11 10:20:46 crc kubenswrapper[4663]: I1211 10:20:46.883836 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-xlbqp"] Dec 11 10:20:46 crc kubenswrapper[4663]: I1211 10:20:46.888339 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-xlbqp"] Dec 11 10:20:46 crc kubenswrapper[4663]: I1211 10:20:46.899812 4663 scope.go:117] "RemoveContainer" containerID="90929084e76cbe40ab0af0b2f59c9103c99644ed21c5a0d5ebc4ab9200bd8131" Dec 11 10:20:46 crc kubenswrapper[4663]: I1211 10:20:46.913596 4663 scope.go:117] "RemoveContainer" containerID="f4f565ca8b8023d95b855ad11f140d56f1d811c19a61485fdfe95ab16db6f8ae" Dec 11 10:20:46 crc kubenswrapper[4663]: E1211 10:20:46.913955 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f4f565ca8b8023d95b855ad11f140d56f1d811c19a61485fdfe95ab16db6f8ae\": container with ID starting with f4f565ca8b8023d95b855ad11f140d56f1d811c19a61485fdfe95ab16db6f8ae not found: ID does not exist" containerID="f4f565ca8b8023d95b855ad11f140d56f1d811c19a61485fdfe95ab16db6f8ae" Dec 11 10:20:46 crc kubenswrapper[4663]: I1211 10:20:46.914001 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4f565ca8b8023d95b855ad11f140d56f1d811c19a61485fdfe95ab16db6f8ae"} err="failed to get container status \"f4f565ca8b8023d95b855ad11f140d56f1d811c19a61485fdfe95ab16db6f8ae\": rpc error: code = NotFound 
desc = could not find container \"f4f565ca8b8023d95b855ad11f140d56f1d811c19a61485fdfe95ab16db6f8ae\": container with ID starting with f4f565ca8b8023d95b855ad11f140d56f1d811c19a61485fdfe95ab16db6f8ae not found: ID does not exist" Dec 11 10:20:46 crc kubenswrapper[4663]: I1211 10:20:46.914050 4663 scope.go:117] "RemoveContainer" containerID="5a6c36c909dcf2d9be29d805b70b4b5738fbe014cbbfb3cc2f3449a1f08803d2" Dec 11 10:20:46 crc kubenswrapper[4663]: E1211 10:20:46.914669 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5a6c36c909dcf2d9be29d805b70b4b5738fbe014cbbfb3cc2f3449a1f08803d2\": container with ID starting with 5a6c36c909dcf2d9be29d805b70b4b5738fbe014cbbfb3cc2f3449a1f08803d2 not found: ID does not exist" containerID="5a6c36c909dcf2d9be29d805b70b4b5738fbe014cbbfb3cc2f3449a1f08803d2" Dec 11 10:20:46 crc kubenswrapper[4663]: I1211 10:20:46.914708 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a6c36c909dcf2d9be29d805b70b4b5738fbe014cbbfb3cc2f3449a1f08803d2"} err="failed to get container status \"5a6c36c909dcf2d9be29d805b70b4b5738fbe014cbbfb3cc2f3449a1f08803d2\": rpc error: code = NotFound desc = could not find container \"5a6c36c909dcf2d9be29d805b70b4b5738fbe014cbbfb3cc2f3449a1f08803d2\": container with ID starting with 5a6c36c909dcf2d9be29d805b70b4b5738fbe014cbbfb3cc2f3449a1f08803d2 not found: ID does not exist" Dec 11 10:20:46 crc kubenswrapper[4663]: I1211 10:20:46.914730 4663 scope.go:117] "RemoveContainer" containerID="90929084e76cbe40ab0af0b2f59c9103c99644ed21c5a0d5ebc4ab9200bd8131" Dec 11 10:20:46 crc kubenswrapper[4663]: E1211 10:20:46.915000 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"90929084e76cbe40ab0af0b2f59c9103c99644ed21c5a0d5ebc4ab9200bd8131\": container with ID starting with 90929084e76cbe40ab0af0b2f59c9103c99644ed21c5a0d5ebc4ab9200bd8131 not found: ID does not exist" containerID="90929084e76cbe40ab0af0b2f59c9103c99644ed21c5a0d5ebc4ab9200bd8131" Dec 11 10:20:46 crc kubenswrapper[4663]: I1211 10:20:46.915025 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"90929084e76cbe40ab0af0b2f59c9103c99644ed21c5a0d5ebc4ab9200bd8131"} err="failed to get container status \"90929084e76cbe40ab0af0b2f59c9103c99644ed21c5a0d5ebc4ab9200bd8131\": rpc error: code = NotFound desc = could not find container \"90929084e76cbe40ab0af0b2f59c9103c99644ed21c5a0d5ebc4ab9200bd8131\": container with ID starting with 90929084e76cbe40ab0af0b2f59c9103c99644ed21c5a0d5ebc4ab9200bd8131 not found: ID does not exist" Dec 11 10:20:47 crc kubenswrapper[4663]: I1211 10:20:47.512703 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d2fb168f-d6a2-4800-bf80-01e7167cea09" path="/var/lib/kubelet/pods/d2fb168f-d6a2-4800-bf80-01e7167cea09/volumes" Dec 11 10:20:49 crc kubenswrapper[4663]: I1211 10:20:49.271037 4663 patch_prober.go:28] interesting pod/machine-config-daemon-mq524 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 10:20:49 crc kubenswrapper[4663]: I1211 10:20:49.271786 4663 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 10:20:49 crc kubenswrapper[4663]: I1211 10:20:49.347561 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-mlbtj" Dec 11 10:20:49 crc kubenswrapper[4663]: I1211 10:20:49.379091 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-mlbtj" Dec 11 10:20:49 crc kubenswrapper[4663]: I1211 10:20:49.540068 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-p4fsq" Dec 11 10:20:49 crc kubenswrapper[4663]: I1211 10:20:49.834432 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-ql2bc" Dec 11 10:20:50 crc kubenswrapper[4663]: I1211 10:20:50.034440 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-fdgt7" Dec 11 10:20:51 crc kubenswrapper[4663]: I1211 10:20:51.906416 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-tcx5r" Dec 11 10:20:51 crc kubenswrapper[4663]: I1211 10:20:51.906792 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-tcx5r" Dec 11 10:20:51 crc kubenswrapper[4663]: I1211 10:20:51.920370 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ql2bc"] Dec 11 10:20:51 crc kubenswrapper[4663]: I1211 10:20:51.920744 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-ql2bc" podUID="c6c8ac96-598d-4a04-845d-84ed9cf435cf" containerName="registry-server" containerID="cri-o://d1e45577fab147efceedc53b20a8a3c821006b170ec982f60d3a271159cad071" gracePeriod=2 Dec 11 10:20:51 crc kubenswrapper[4663]: I1211 10:20:51.948048 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-tcx5r" Dec 11 10:20:51 crc kubenswrapper[4663]: I1211 10:20:51.958247 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 11 10:20:51 crc kubenswrapper[4663]: E1211 10:20:51.958527 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29412828-843a-414f-acc0-c130aa563203" containerName="pruner" Dec 11 10:20:51 crc kubenswrapper[4663]: I1211 10:20:51.958552 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="29412828-843a-414f-acc0-c130aa563203" containerName="pruner" Dec 11 10:20:51 crc kubenswrapper[4663]: E1211 10:20:51.958562 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="671fd0cb-6807-4c8d-9c42-64c5dcc1ad2b" containerName="pruner" Dec 11 10:20:51 crc kubenswrapper[4663]: I1211 10:20:51.958570 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="671fd0cb-6807-4c8d-9c42-64c5dcc1ad2b" containerName="pruner" Dec 11 10:20:51 crc kubenswrapper[4663]: E1211 10:20:51.958581 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2fb168f-d6a2-4800-bf80-01e7167cea09" containerName="extract-utilities" Dec 11 10:20:51 crc kubenswrapper[4663]: I1211 10:20:51.958591 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2fb168f-d6a2-4800-bf80-01e7167cea09" containerName="extract-utilities" 
Dec 11 10:20:51 crc kubenswrapper[4663]: E1211 10:20:51.958604 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2fb168f-d6a2-4800-bf80-01e7167cea09" containerName="extract-content" Dec 11 10:20:51 crc kubenswrapper[4663]: I1211 10:20:51.958609 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2fb168f-d6a2-4800-bf80-01e7167cea09" containerName="extract-content" Dec 11 10:20:51 crc kubenswrapper[4663]: E1211 10:20:51.958618 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2fb168f-d6a2-4800-bf80-01e7167cea09" containerName="registry-server" Dec 11 10:20:51 crc kubenswrapper[4663]: I1211 10:20:51.958624 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2fb168f-d6a2-4800-bf80-01e7167cea09" containerName="registry-server" Dec 11 10:20:51 crc kubenswrapper[4663]: I1211 10:20:51.958745 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="671fd0cb-6807-4c8d-9c42-64c5dcc1ad2b" containerName="pruner" Dec 11 10:20:51 crc kubenswrapper[4663]: I1211 10:20:51.958757 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="d2fb168f-d6a2-4800-bf80-01e7167cea09" containerName="registry-server" Dec 11 10:20:51 crc kubenswrapper[4663]: I1211 10:20:51.958770 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="29412828-843a-414f-acc0-c130aa563203" containerName="pruner" Dec 11 10:20:51 crc kubenswrapper[4663]: I1211 10:20:51.959247 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 11 10:20:51 crc kubenswrapper[4663]: I1211 10:20:51.962347 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 11 10:20:51 crc kubenswrapper[4663]: I1211 10:20:51.962538 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 11 10:20:51 crc kubenswrapper[4663]: I1211 10:20:51.967748 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 11 10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.059278 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/217976cb-9363-4d97-ba48-2716b0ceac99-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"217976cb-9363-4d97-ba48-2716b0ceac99\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 11 10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.059341 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/217976cb-9363-4d97-ba48-2716b0ceac99-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"217976cb-9363-4d97-ba48-2716b0ceac99\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 11 10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.160517 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/217976cb-9363-4d97-ba48-2716b0ceac99-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"217976cb-9363-4d97-ba48-2716b0ceac99\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 11 10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.160625 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: 
\"kubernetes.io/projected/217976cb-9363-4d97-ba48-2716b0ceac99-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"217976cb-9363-4d97-ba48-2716b0ceac99\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 11 10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.160673 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/217976cb-9363-4d97-ba48-2716b0ceac99-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"217976cb-9363-4d97-ba48-2716b0ceac99\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 11 10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.176656 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/217976cb-9363-4d97-ba48-2716b0ceac99-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"217976cb-9363-4d97-ba48-2716b0ceac99\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 11 10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.208408 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ql2bc" Dec 11 10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.261032 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c6c8ac96-598d-4a04-845d-84ed9cf435cf-catalog-content\") pod \"c6c8ac96-598d-4a04-845d-84ed9cf435cf\" (UID: \"c6c8ac96-598d-4a04-845d-84ed9cf435cf\") " Dec 11 10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.280717 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c6c8ac96-598d-4a04-845d-84ed9cf435cf-utilities\") pod \"c6c8ac96-598d-4a04-845d-84ed9cf435cf\" (UID: \"c6c8ac96-598d-4a04-845d-84ed9cf435cf\") " Dec 11 10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.280855 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h2bzg\" (UniqueName: \"kubernetes.io/projected/c6c8ac96-598d-4a04-845d-84ed9cf435cf-kube-api-access-h2bzg\") pod \"c6c8ac96-598d-4a04-845d-84ed9cf435cf\" (UID: \"c6c8ac96-598d-4a04-845d-84ed9cf435cf\") " Dec 11 10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.281342 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c6c8ac96-598d-4a04-845d-84ed9cf435cf-utilities" (OuterVolumeSpecName: "utilities") pod "c6c8ac96-598d-4a04-845d-84ed9cf435cf" (UID: "c6c8ac96-598d-4a04-845d-84ed9cf435cf"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.283561 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6c8ac96-598d-4a04-845d-84ed9cf435cf-kube-api-access-h2bzg" (OuterVolumeSpecName: "kube-api-access-h2bzg") pod "c6c8ac96-598d-4a04-845d-84ed9cf435cf" (UID: "c6c8ac96-598d-4a04-845d-84ed9cf435cf"). InnerVolumeSpecName "kube-api-access-h2bzg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.302372 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c6c8ac96-598d-4a04-845d-84ed9cf435cf-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c6c8ac96-598d-4a04-845d-84ed9cf435cf" (UID: "c6c8ac96-598d-4a04-845d-84ed9cf435cf"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.313784 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 11 10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.384003 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h2bzg\" (UniqueName: \"kubernetes.io/projected/c6c8ac96-598d-4a04-845d-84ed9cf435cf-kube-api-access-h2bzg\") on node \"crc\" DevicePath \"\"" Dec 11 10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.384062 4663 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c6c8ac96-598d-4a04-845d-84ed9cf435cf-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.384085 4663 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c6c8ac96-598d-4a04-845d-84ed9cf435cf-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.463620 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 11 10:20:52 crc kubenswrapper[4663]: W1211 10:20:52.471293 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod217976cb_9363_4d97_ba48_2716b0ceac99.slice/crio-08c2f88561dee1445634217323755b802eaefaaee56b60bd9c9f1dd08c6e01bd WatchSource:0}: Error finding container 08c2f88561dee1445634217323755b802eaefaaee56b60bd9c9f1dd08c6e01bd: Status 404 returned error can't find the container with id 08c2f88561dee1445634217323755b802eaefaaee56b60bd9c9f1dd08c6e01bd Dec 11 10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.518279 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-fdgt7"] Dec 11 10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.519452 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-fdgt7" podUID="a0d64b13-fea1-428a-a013-5f48ee4b93b7" containerName="registry-server" containerID="cri-o://3e7575e5afdc139abd5b2fa51fb084a81efd0f406ea77db713ab5b7a83c3c0b9" gracePeriod=2 Dec 11 10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.591307 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-zwkwc" Dec 11 10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.629736 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-zwkwc" Dec 11 10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.814109 4663 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-fdgt7" Dec 11 10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.891046 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0d64b13-fea1-428a-a013-5f48ee4b93b7-catalog-content\") pod \"a0d64b13-fea1-428a-a013-5f48ee4b93b7\" (UID: \"a0d64b13-fea1-428a-a013-5f48ee4b93b7\") " Dec 11 10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.891243 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0d64b13-fea1-428a-a013-5f48ee4b93b7-utilities\") pod \"a0d64b13-fea1-428a-a013-5f48ee4b93b7\" (UID: \"a0d64b13-fea1-428a-a013-5f48ee4b93b7\") " Dec 11 10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.891342 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tw92w\" (UniqueName: \"kubernetes.io/projected/a0d64b13-fea1-428a-a013-5f48ee4b93b7-kube-api-access-tw92w\") pod \"a0d64b13-fea1-428a-a013-5f48ee4b93b7\" (UID: \"a0d64b13-fea1-428a-a013-5f48ee4b93b7\") " Dec 11 10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.894479 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a0d64b13-fea1-428a-a013-5f48ee4b93b7-utilities" (OuterVolumeSpecName: "utilities") pod "a0d64b13-fea1-428a-a013-5f48ee4b93b7" (UID: "a0d64b13-fea1-428a-a013-5f48ee4b93b7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.898204 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0d64b13-fea1-428a-a013-5f48ee4b93b7-kube-api-access-tw92w" (OuterVolumeSpecName: "kube-api-access-tw92w") pod "a0d64b13-fea1-428a-a013-5f48ee4b93b7" (UID: "a0d64b13-fea1-428a-a013-5f48ee4b93b7"). InnerVolumeSpecName "kube-api-access-tw92w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.901486 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"217976cb-9363-4d97-ba48-2716b0ceac99","Type":"ContainerStarted","Data":"2f4f4e6bbea3cce694346ccce68373f6ef4512c984dd9e1637e5c58bb21066d1"} Dec 11 10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.901532 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"217976cb-9363-4d97-ba48-2716b0ceac99","Type":"ContainerStarted","Data":"08c2f88561dee1445634217323755b802eaefaaee56b60bd9c9f1dd08c6e01bd"} Dec 11 10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.905207 4663 generic.go:334] "Generic (PLEG): container finished" podID="a0d64b13-fea1-428a-a013-5f48ee4b93b7" containerID="3e7575e5afdc139abd5b2fa51fb084a81efd0f406ea77db713ab5b7a83c3c0b9" exitCode=0 Dec 11 10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.905284 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fdgt7" event={"ID":"a0d64b13-fea1-428a-a013-5f48ee4b93b7","Type":"ContainerDied","Data":"3e7575e5afdc139abd5b2fa51fb084a81efd0f406ea77db713ab5b7a83c3c0b9"} Dec 11 10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.905290 4663 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-fdgt7" Dec 11 10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.905318 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fdgt7" event={"ID":"a0d64b13-fea1-428a-a013-5f48ee4b93b7","Type":"ContainerDied","Data":"04294fc73a86f6d073a184a76c7074e05d4c1411515d10ef1932a9c848f3029d"} Dec 11 10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.905339 4663 scope.go:117] "RemoveContainer" containerID="3e7575e5afdc139abd5b2fa51fb084a81efd0f406ea77db713ab5b7a83c3c0b9" Dec 11 10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.908478 4663 generic.go:334] "Generic (PLEG): container finished" podID="c6c8ac96-598d-4a04-845d-84ed9cf435cf" containerID="d1e45577fab147efceedc53b20a8a3c821006b170ec982f60d3a271159cad071" exitCode=0 Dec 11 10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.909094 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ql2bc" Dec 11 10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.909279 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ql2bc" event={"ID":"c6c8ac96-598d-4a04-845d-84ed9cf435cf","Type":"ContainerDied","Data":"d1e45577fab147efceedc53b20a8a3c821006b170ec982f60d3a271159cad071"} Dec 11 10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.909351 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ql2bc" event={"ID":"c6c8ac96-598d-4a04-845d-84ed9cf435cf","Type":"ContainerDied","Data":"c785ab1054e29c2b2759936fb016b80b0a97dc30b74af9505080e50c14163350"} Dec 11 10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.916894 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=1.91687483 podStartE2EDuration="1.91687483s" podCreationTimestamp="2025-12-11 10:20:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:20:52.914799698 +0000 UTC m=+183.732729481" watchObservedRunningTime="2025-12-11 10:20:52.91687483 +0000 UTC m=+183.734804605" Dec 11 10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.926988 4663 scope.go:117] "RemoveContainer" containerID="41b5bd8a79b9634a399c08f4e6201b791d15a383a68d0450dc9dfee75b1b0ebc" Dec 11 10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.939103 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ql2bc"] Dec 11 10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.941777 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-ql2bc"] Dec 11 10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.943564 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a0d64b13-fea1-428a-a013-5f48ee4b93b7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a0d64b13-fea1-428a-a013-5f48ee4b93b7" (UID: "a0d64b13-fea1-428a-a013-5f48ee4b93b7"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.947786 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-tcx5r" Dec 11 10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.962970 4663 scope.go:117] "RemoveContainer" containerID="3ec7e8b731488832a0705cee56412e80179708266614250c3a1228b848fda0c3" Dec 11 10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.975388 4663 scope.go:117] "RemoveContainer" containerID="3e7575e5afdc139abd5b2fa51fb084a81efd0f406ea77db713ab5b7a83c3c0b9" Dec 11 10:20:52 crc kubenswrapper[4663]: E1211 10:20:52.975816 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3e7575e5afdc139abd5b2fa51fb084a81efd0f406ea77db713ab5b7a83c3c0b9\": container with ID starting with 3e7575e5afdc139abd5b2fa51fb084a81efd0f406ea77db713ab5b7a83c3c0b9 not found: ID does not exist" containerID="3e7575e5afdc139abd5b2fa51fb084a81efd0f406ea77db713ab5b7a83c3c0b9" Dec 11 10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.975856 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e7575e5afdc139abd5b2fa51fb084a81efd0f406ea77db713ab5b7a83c3c0b9"} err="failed to get container status \"3e7575e5afdc139abd5b2fa51fb084a81efd0f406ea77db713ab5b7a83c3c0b9\": rpc error: code = NotFound desc = could not find container \"3e7575e5afdc139abd5b2fa51fb084a81efd0f406ea77db713ab5b7a83c3c0b9\": container with ID starting with 3e7575e5afdc139abd5b2fa51fb084a81efd0f406ea77db713ab5b7a83c3c0b9 not found: ID does not exist" Dec 11 10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.975889 4663 scope.go:117] "RemoveContainer" containerID="41b5bd8a79b9634a399c08f4e6201b791d15a383a68d0450dc9dfee75b1b0ebc" Dec 11 10:20:52 crc kubenswrapper[4663]: E1211 10:20:52.976218 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"41b5bd8a79b9634a399c08f4e6201b791d15a383a68d0450dc9dfee75b1b0ebc\": container with ID starting with 41b5bd8a79b9634a399c08f4e6201b791d15a383a68d0450dc9dfee75b1b0ebc not found: ID does not exist" containerID="41b5bd8a79b9634a399c08f4e6201b791d15a383a68d0450dc9dfee75b1b0ebc" Dec 11 10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.976251 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"41b5bd8a79b9634a399c08f4e6201b791d15a383a68d0450dc9dfee75b1b0ebc"} err="failed to get container status \"41b5bd8a79b9634a399c08f4e6201b791d15a383a68d0450dc9dfee75b1b0ebc\": rpc error: code = NotFound desc = could not find container \"41b5bd8a79b9634a399c08f4e6201b791d15a383a68d0450dc9dfee75b1b0ebc\": container with ID starting with 41b5bd8a79b9634a399c08f4e6201b791d15a383a68d0450dc9dfee75b1b0ebc not found: ID does not exist" Dec 11 10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.976275 4663 scope.go:117] "RemoveContainer" containerID="3ec7e8b731488832a0705cee56412e80179708266614250c3a1228b848fda0c3" Dec 11 10:20:52 crc kubenswrapper[4663]: E1211 10:20:52.976583 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3ec7e8b731488832a0705cee56412e80179708266614250c3a1228b848fda0c3\": container with ID starting with 3ec7e8b731488832a0705cee56412e80179708266614250c3a1228b848fda0c3 not found: ID does not exist" containerID="3ec7e8b731488832a0705cee56412e80179708266614250c3a1228b848fda0c3" Dec 11 
10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.976620 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ec7e8b731488832a0705cee56412e80179708266614250c3a1228b848fda0c3"} err="failed to get container status \"3ec7e8b731488832a0705cee56412e80179708266614250c3a1228b848fda0c3\": rpc error: code = NotFound desc = could not find container \"3ec7e8b731488832a0705cee56412e80179708266614250c3a1228b848fda0c3\": container with ID starting with 3ec7e8b731488832a0705cee56412e80179708266614250c3a1228b848fda0c3 not found: ID does not exist" Dec 11 10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.976645 4663 scope.go:117] "RemoveContainer" containerID="d1e45577fab147efceedc53b20a8a3c821006b170ec982f60d3a271159cad071" Dec 11 10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.987098 4663 scope.go:117] "RemoveContainer" containerID="2c279a3bca5fcce5047b8902699ee59316640fa3a04ae11eb47c0ac2a34a2ac7" Dec 11 10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.993313 4663 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0d64b13-fea1-428a-a013-5f48ee4b93b7-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.993341 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tw92w\" (UniqueName: \"kubernetes.io/projected/a0d64b13-fea1-428a-a013-5f48ee4b93b7-kube-api-access-tw92w\") on node \"crc\" DevicePath \"\"" Dec 11 10:20:52 crc kubenswrapper[4663]: I1211 10:20:52.993352 4663 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0d64b13-fea1-428a-a013-5f48ee4b93b7-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 10:20:53 crc kubenswrapper[4663]: I1211 10:20:53.000068 4663 scope.go:117] "RemoveContainer" containerID="693ef8a18f38188ebf4eff0b016b5322e8320f0fb612cdb17f85cfca376d0e08" Dec 11 10:20:53 crc kubenswrapper[4663]: I1211 10:20:53.013427 4663 scope.go:117] "RemoveContainer" containerID="d1e45577fab147efceedc53b20a8a3c821006b170ec982f60d3a271159cad071" Dec 11 10:20:53 crc kubenswrapper[4663]: E1211 10:20:53.013795 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d1e45577fab147efceedc53b20a8a3c821006b170ec982f60d3a271159cad071\": container with ID starting with d1e45577fab147efceedc53b20a8a3c821006b170ec982f60d3a271159cad071 not found: ID does not exist" containerID="d1e45577fab147efceedc53b20a8a3c821006b170ec982f60d3a271159cad071" Dec 11 10:20:53 crc kubenswrapper[4663]: I1211 10:20:53.013835 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d1e45577fab147efceedc53b20a8a3c821006b170ec982f60d3a271159cad071"} err="failed to get container status \"d1e45577fab147efceedc53b20a8a3c821006b170ec982f60d3a271159cad071\": rpc error: code = NotFound desc = could not find container \"d1e45577fab147efceedc53b20a8a3c821006b170ec982f60d3a271159cad071\": container with ID starting with d1e45577fab147efceedc53b20a8a3c821006b170ec982f60d3a271159cad071 not found: ID does not exist" Dec 11 10:20:53 crc kubenswrapper[4663]: I1211 10:20:53.013880 4663 scope.go:117] "RemoveContainer" containerID="2c279a3bca5fcce5047b8902699ee59316640fa3a04ae11eb47c0ac2a34a2ac7" Dec 11 10:20:53 crc kubenswrapper[4663]: E1211 10:20:53.014279 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"2c279a3bca5fcce5047b8902699ee59316640fa3a04ae11eb47c0ac2a34a2ac7\": container with ID starting with 2c279a3bca5fcce5047b8902699ee59316640fa3a04ae11eb47c0ac2a34a2ac7 not found: ID does not exist" containerID="2c279a3bca5fcce5047b8902699ee59316640fa3a04ae11eb47c0ac2a34a2ac7" Dec 11 10:20:53 crc kubenswrapper[4663]: I1211 10:20:53.014313 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c279a3bca5fcce5047b8902699ee59316640fa3a04ae11eb47c0ac2a34a2ac7"} err="failed to get container status \"2c279a3bca5fcce5047b8902699ee59316640fa3a04ae11eb47c0ac2a34a2ac7\": rpc error: code = NotFound desc = could not find container \"2c279a3bca5fcce5047b8902699ee59316640fa3a04ae11eb47c0ac2a34a2ac7\": container with ID starting with 2c279a3bca5fcce5047b8902699ee59316640fa3a04ae11eb47c0ac2a34a2ac7 not found: ID does not exist" Dec 11 10:20:53 crc kubenswrapper[4663]: I1211 10:20:53.014338 4663 scope.go:117] "RemoveContainer" containerID="693ef8a18f38188ebf4eff0b016b5322e8320f0fb612cdb17f85cfca376d0e08" Dec 11 10:20:53 crc kubenswrapper[4663]: E1211 10:20:53.014744 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"693ef8a18f38188ebf4eff0b016b5322e8320f0fb612cdb17f85cfca376d0e08\": container with ID starting with 693ef8a18f38188ebf4eff0b016b5322e8320f0fb612cdb17f85cfca376d0e08 not found: ID does not exist" containerID="693ef8a18f38188ebf4eff0b016b5322e8320f0fb612cdb17f85cfca376d0e08" Dec 11 10:20:53 crc kubenswrapper[4663]: I1211 10:20:53.014779 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"693ef8a18f38188ebf4eff0b016b5322e8320f0fb612cdb17f85cfca376d0e08"} err="failed to get container status \"693ef8a18f38188ebf4eff0b016b5322e8320f0fb612cdb17f85cfca376d0e08\": rpc error: code = NotFound desc = could not find container \"693ef8a18f38188ebf4eff0b016b5322e8320f0fb612cdb17f85cfca376d0e08\": container with ID starting with 693ef8a18f38188ebf4eff0b016b5322e8320f0fb612cdb17f85cfca376d0e08 not found: ID does not exist" Dec 11 10:20:53 crc kubenswrapper[4663]: I1211 10:20:53.235086 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-fdgt7"] Dec 11 10:20:53 crc kubenswrapper[4663]: I1211 10:20:53.238113 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-fdgt7"] Dec 11 10:20:53 crc kubenswrapper[4663]: I1211 10:20:53.510830 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0d64b13-fea1-428a-a013-5f48ee4b93b7" path="/var/lib/kubelet/pods/a0d64b13-fea1-428a-a013-5f48ee4b93b7/volumes" Dec 11 10:20:53 crc kubenswrapper[4663]: I1211 10:20:53.511425 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c6c8ac96-598d-4a04-845d-84ed9cf435cf" path="/var/lib/kubelet/pods/c6c8ac96-598d-4a04-845d-84ed9cf435cf/volumes" Dec 11 10:20:53 crc kubenswrapper[4663]: I1211 10:20:53.511904 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-cgckf"] Dec 11 10:20:53 crc kubenswrapper[4663]: I1211 10:20:53.917551 4663 generic.go:334] "Generic (PLEG): container finished" podID="217976cb-9363-4d97-ba48-2716b0ceac99" containerID="2f4f4e6bbea3cce694346ccce68373f6ef4512c984dd9e1637e5c58bb21066d1" exitCode=0 Dec 11 10:20:53 crc kubenswrapper[4663]: I1211 10:20:53.917598 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"217976cb-9363-4d97-ba48-2716b0ceac99","Type":"ContainerDied","Data":"2f4f4e6bbea3cce694346ccce68373f6ef4512c984dd9e1637e5c58bb21066d1"} Dec 11 10:20:54 crc kubenswrapper[4663]: I1211 10:20:54.916750 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tcx5r"] Dec 11 10:20:54 crc kubenswrapper[4663]: I1211 10:20:54.923039 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-tcx5r" podUID="d71b3f6f-1e97-4471-a823-4e04a0e0c684" containerName="registry-server" containerID="cri-o://5955958be1e2c76665289b9bc8ac75c96f4b22a7d2ad4459b3cc8a5f5e183775" gracePeriod=2 Dec 11 10:20:54 crc kubenswrapper[4663]: I1211 10:20:54.935204 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 10:20:55 crc kubenswrapper[4663]: I1211 10:20:55.168586 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 11 10:20:55 crc kubenswrapper[4663]: I1211 10:20:55.242743 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tcx5r" Dec 11 10:20:55 crc kubenswrapper[4663]: I1211 10:20:55.335717 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/217976cb-9363-4d97-ba48-2716b0ceac99-kube-api-access\") pod \"217976cb-9363-4d97-ba48-2716b0ceac99\" (UID: \"217976cb-9363-4d97-ba48-2716b0ceac99\") " Dec 11 10:20:55 crc kubenswrapper[4663]: I1211 10:20:55.335770 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sbdpw\" (UniqueName: \"kubernetes.io/projected/d71b3f6f-1e97-4471-a823-4e04a0e0c684-kube-api-access-sbdpw\") pod \"d71b3f6f-1e97-4471-a823-4e04a0e0c684\" (UID: \"d71b3f6f-1e97-4471-a823-4e04a0e0c684\") " Dec 11 10:20:55 crc kubenswrapper[4663]: I1211 10:20:55.335891 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d71b3f6f-1e97-4471-a823-4e04a0e0c684-catalog-content\") pod \"d71b3f6f-1e97-4471-a823-4e04a0e0c684\" (UID: \"d71b3f6f-1e97-4471-a823-4e04a0e0c684\") " Dec 11 10:20:55 crc kubenswrapper[4663]: I1211 10:20:55.335924 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d71b3f6f-1e97-4471-a823-4e04a0e0c684-utilities\") pod \"d71b3f6f-1e97-4471-a823-4e04a0e0c684\" (UID: \"d71b3f6f-1e97-4471-a823-4e04a0e0c684\") " Dec 11 10:20:55 crc kubenswrapper[4663]: I1211 10:20:55.335950 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/217976cb-9363-4d97-ba48-2716b0ceac99-kubelet-dir\") pod \"217976cb-9363-4d97-ba48-2716b0ceac99\" (UID: \"217976cb-9363-4d97-ba48-2716b0ceac99\") " Dec 11 10:20:55 crc kubenswrapper[4663]: I1211 10:20:55.336228 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/217976cb-9363-4d97-ba48-2716b0ceac99-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "217976cb-9363-4d97-ba48-2716b0ceac99" (UID: "217976cb-9363-4d97-ba48-2716b0ceac99"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 10:20:55 crc kubenswrapper[4663]: I1211 10:20:55.336734 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d71b3f6f-1e97-4471-a823-4e04a0e0c684-utilities" (OuterVolumeSpecName: "utilities") pod "d71b3f6f-1e97-4471-a823-4e04a0e0c684" (UID: "d71b3f6f-1e97-4471-a823-4e04a0e0c684"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:20:55 crc kubenswrapper[4663]: I1211 10:20:55.342775 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/217976cb-9363-4d97-ba48-2716b0ceac99-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "217976cb-9363-4d97-ba48-2716b0ceac99" (UID: "217976cb-9363-4d97-ba48-2716b0ceac99"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:20:55 crc kubenswrapper[4663]: I1211 10:20:55.342829 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d71b3f6f-1e97-4471-a823-4e04a0e0c684-kube-api-access-sbdpw" (OuterVolumeSpecName: "kube-api-access-sbdpw") pod "d71b3f6f-1e97-4471-a823-4e04a0e0c684" (UID: "d71b3f6f-1e97-4471-a823-4e04a0e0c684"). InnerVolumeSpecName "kube-api-access-sbdpw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:20:55 crc kubenswrapper[4663]: I1211 10:20:55.351980 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d71b3f6f-1e97-4471-a823-4e04a0e0c684-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d71b3f6f-1e97-4471-a823-4e04a0e0c684" (UID: "d71b3f6f-1e97-4471-a823-4e04a0e0c684"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:20:55 crc kubenswrapper[4663]: I1211 10:20:55.437847 4663 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d71b3f6f-1e97-4471-a823-4e04a0e0c684-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 10:20:55 crc kubenswrapper[4663]: I1211 10:20:55.437885 4663 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d71b3f6f-1e97-4471-a823-4e04a0e0c684-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 10:20:55 crc kubenswrapper[4663]: I1211 10:20:55.437896 4663 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/217976cb-9363-4d97-ba48-2716b0ceac99-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 11 10:20:55 crc kubenswrapper[4663]: I1211 10:20:55.437905 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/217976cb-9363-4d97-ba48-2716b0ceac99-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 11 10:20:55 crc kubenswrapper[4663]: I1211 10:20:55.437914 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sbdpw\" (UniqueName: \"kubernetes.io/projected/d71b3f6f-1e97-4471-a823-4e04a0e0c684-kube-api-access-sbdpw\") on node \"crc\" DevicePath \"\"" Dec 11 10:20:55 crc kubenswrapper[4663]: I1211 10:20:55.929738 4663 generic.go:334] "Generic (PLEG): container finished" podID="d71b3f6f-1e97-4471-a823-4e04a0e0c684" containerID="5955958be1e2c76665289b9bc8ac75c96f4b22a7d2ad4459b3cc8a5f5e183775" exitCode=0 Dec 11 10:20:55 crc kubenswrapper[4663]: I1211 10:20:55.929826 4663 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tcx5r" Dec 11 10:20:55 crc kubenswrapper[4663]: I1211 10:20:55.929837 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tcx5r" event={"ID":"d71b3f6f-1e97-4471-a823-4e04a0e0c684","Type":"ContainerDied","Data":"5955958be1e2c76665289b9bc8ac75c96f4b22a7d2ad4459b3cc8a5f5e183775"} Dec 11 10:20:55 crc kubenswrapper[4663]: I1211 10:20:55.930241 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tcx5r" event={"ID":"d71b3f6f-1e97-4471-a823-4e04a0e0c684","Type":"ContainerDied","Data":"673faf7eee2c2ce57656ee92f1840baea0d9ecc95e4d08142432fe3d8429979b"} Dec 11 10:20:55 crc kubenswrapper[4663]: I1211 10:20:55.930271 4663 scope.go:117] "RemoveContainer" containerID="5955958be1e2c76665289b9bc8ac75c96f4b22a7d2ad4459b3cc8a5f5e183775" Dec 11 10:20:55 crc kubenswrapper[4663]: I1211 10:20:55.932367 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"217976cb-9363-4d97-ba48-2716b0ceac99","Type":"ContainerDied","Data":"08c2f88561dee1445634217323755b802eaefaaee56b60bd9c9f1dd08c6e01bd"} Dec 11 10:20:55 crc kubenswrapper[4663]: I1211 10:20:55.932414 4663 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="08c2f88561dee1445634217323755b802eaefaaee56b60bd9c9f1dd08c6e01bd" Dec 11 10:20:55 crc kubenswrapper[4663]: I1211 10:20:55.932472 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 11 10:20:55 crc kubenswrapper[4663]: I1211 10:20:55.945657 4663 scope.go:117] "RemoveContainer" containerID="143250c64b6fa123d2969c1441b4c30f976d37fb9c9bfed4698cb09aad30ce31" Dec 11 10:20:55 crc kubenswrapper[4663]: I1211 10:20:55.947321 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tcx5r"] Dec 11 10:20:55 crc kubenswrapper[4663]: I1211 10:20:55.950735 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-tcx5r"] Dec 11 10:20:55 crc kubenswrapper[4663]: I1211 10:20:55.959505 4663 scope.go:117] "RemoveContainer" containerID="b7f021f191a98349a7a1591861195a3e6b946609f0a54d7af02ce72b032d177a" Dec 11 10:20:55 crc kubenswrapper[4663]: I1211 10:20:55.970689 4663 scope.go:117] "RemoveContainer" containerID="5955958be1e2c76665289b9bc8ac75c96f4b22a7d2ad4459b3cc8a5f5e183775" Dec 11 10:20:55 crc kubenswrapper[4663]: E1211 10:20:55.971149 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5955958be1e2c76665289b9bc8ac75c96f4b22a7d2ad4459b3cc8a5f5e183775\": container with ID starting with 5955958be1e2c76665289b9bc8ac75c96f4b22a7d2ad4459b3cc8a5f5e183775 not found: ID does not exist" containerID="5955958be1e2c76665289b9bc8ac75c96f4b22a7d2ad4459b3cc8a5f5e183775" Dec 11 10:20:55 crc kubenswrapper[4663]: I1211 10:20:55.971192 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5955958be1e2c76665289b9bc8ac75c96f4b22a7d2ad4459b3cc8a5f5e183775"} err="failed to get container status \"5955958be1e2c76665289b9bc8ac75c96f4b22a7d2ad4459b3cc8a5f5e183775\": rpc error: code = NotFound desc = could not find container \"5955958be1e2c76665289b9bc8ac75c96f4b22a7d2ad4459b3cc8a5f5e183775\": container with ID starting with 5955958be1e2c76665289b9bc8ac75c96f4b22a7d2ad4459b3cc8a5f5e183775 not found: 
ID does not exist" Dec 11 10:20:55 crc kubenswrapper[4663]: I1211 10:20:55.971215 4663 scope.go:117] "RemoveContainer" containerID="143250c64b6fa123d2969c1441b4c30f976d37fb9c9bfed4698cb09aad30ce31" Dec 11 10:20:55 crc kubenswrapper[4663]: E1211 10:20:55.971623 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"143250c64b6fa123d2969c1441b4c30f976d37fb9c9bfed4698cb09aad30ce31\": container with ID starting with 143250c64b6fa123d2969c1441b4c30f976d37fb9c9bfed4698cb09aad30ce31 not found: ID does not exist" containerID="143250c64b6fa123d2969c1441b4c30f976d37fb9c9bfed4698cb09aad30ce31" Dec 11 10:20:55 crc kubenswrapper[4663]: I1211 10:20:55.971642 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"143250c64b6fa123d2969c1441b4c30f976d37fb9c9bfed4698cb09aad30ce31"} err="failed to get container status \"143250c64b6fa123d2969c1441b4c30f976d37fb9c9bfed4698cb09aad30ce31\": rpc error: code = NotFound desc = could not find container \"143250c64b6fa123d2969c1441b4c30f976d37fb9c9bfed4698cb09aad30ce31\": container with ID starting with 143250c64b6fa123d2969c1441b4c30f976d37fb9c9bfed4698cb09aad30ce31 not found: ID does not exist" Dec 11 10:20:55 crc kubenswrapper[4663]: I1211 10:20:55.971655 4663 scope.go:117] "RemoveContainer" containerID="b7f021f191a98349a7a1591861195a3e6b946609f0a54d7af02ce72b032d177a" Dec 11 10:20:55 crc kubenswrapper[4663]: E1211 10:20:55.971906 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b7f021f191a98349a7a1591861195a3e6b946609f0a54d7af02ce72b032d177a\": container with ID starting with b7f021f191a98349a7a1591861195a3e6b946609f0a54d7af02ce72b032d177a not found: ID does not exist" containerID="b7f021f191a98349a7a1591861195a3e6b946609f0a54d7af02ce72b032d177a" Dec 11 10:20:55 crc kubenswrapper[4663]: I1211 10:20:55.971926 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b7f021f191a98349a7a1591861195a3e6b946609f0a54d7af02ce72b032d177a"} err="failed to get container status \"b7f021f191a98349a7a1591861195a3e6b946609f0a54d7af02ce72b032d177a\": rpc error: code = NotFound desc = could not find container \"b7f021f191a98349a7a1591861195a3e6b946609f0a54d7af02ce72b032d177a\": container with ID starting with b7f021f191a98349a7a1591861195a3e6b946609f0a54d7af02ce72b032d177a not found: ID does not exist" Dec 11 10:20:57 crc kubenswrapper[4663]: I1211 10:20:57.511043 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d71b3f6f-1e97-4471-a823-4e04a0e0c684" path="/var/lib/kubelet/pods/d71b3f6f-1e97-4471-a823-4e04a0e0c684/volumes" Dec 11 10:20:57 crc kubenswrapper[4663]: I1211 10:20:57.748858 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 11 10:20:57 crc kubenswrapper[4663]: E1211 10:20:57.749095 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0d64b13-fea1-428a-a013-5f48ee4b93b7" containerName="extract-utilities" Dec 11 10:20:57 crc kubenswrapper[4663]: I1211 10:20:57.749110 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0d64b13-fea1-428a-a013-5f48ee4b93b7" containerName="extract-utilities" Dec 11 10:20:57 crc kubenswrapper[4663]: E1211 10:20:57.749125 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d71b3f6f-1e97-4471-a823-4e04a0e0c684" containerName="extract-content" Dec 11 10:20:57 crc kubenswrapper[4663]: 
I1211 10:20:57.749132 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="d71b3f6f-1e97-4471-a823-4e04a0e0c684" containerName="extract-content" Dec 11 10:20:57 crc kubenswrapper[4663]: E1211 10:20:57.749141 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d71b3f6f-1e97-4471-a823-4e04a0e0c684" containerName="extract-utilities" Dec 11 10:20:57 crc kubenswrapper[4663]: I1211 10:20:57.749147 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="d71b3f6f-1e97-4471-a823-4e04a0e0c684" containerName="extract-utilities" Dec 11 10:20:57 crc kubenswrapper[4663]: E1211 10:20:57.749156 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6c8ac96-598d-4a04-845d-84ed9cf435cf" containerName="extract-utilities" Dec 11 10:20:57 crc kubenswrapper[4663]: I1211 10:20:57.749161 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6c8ac96-598d-4a04-845d-84ed9cf435cf" containerName="extract-utilities" Dec 11 10:20:57 crc kubenswrapper[4663]: E1211 10:20:57.749170 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0d64b13-fea1-428a-a013-5f48ee4b93b7" containerName="registry-server" Dec 11 10:20:57 crc kubenswrapper[4663]: I1211 10:20:57.749176 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0d64b13-fea1-428a-a013-5f48ee4b93b7" containerName="registry-server" Dec 11 10:20:57 crc kubenswrapper[4663]: E1211 10:20:57.749198 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6c8ac96-598d-4a04-845d-84ed9cf435cf" containerName="extract-content" Dec 11 10:20:57 crc kubenswrapper[4663]: I1211 10:20:57.749204 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6c8ac96-598d-4a04-845d-84ed9cf435cf" containerName="extract-content" Dec 11 10:20:57 crc kubenswrapper[4663]: E1211 10:20:57.749212 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6c8ac96-598d-4a04-845d-84ed9cf435cf" containerName="registry-server" Dec 11 10:20:57 crc kubenswrapper[4663]: I1211 10:20:57.749217 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6c8ac96-598d-4a04-845d-84ed9cf435cf" containerName="registry-server" Dec 11 10:20:57 crc kubenswrapper[4663]: E1211 10:20:57.749228 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0d64b13-fea1-428a-a013-5f48ee4b93b7" containerName="extract-content" Dec 11 10:20:57 crc kubenswrapper[4663]: I1211 10:20:57.749234 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0d64b13-fea1-428a-a013-5f48ee4b93b7" containerName="extract-content" Dec 11 10:20:57 crc kubenswrapper[4663]: E1211 10:20:57.749243 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="217976cb-9363-4d97-ba48-2716b0ceac99" containerName="pruner" Dec 11 10:20:57 crc kubenswrapper[4663]: I1211 10:20:57.749249 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="217976cb-9363-4d97-ba48-2716b0ceac99" containerName="pruner" Dec 11 10:20:57 crc kubenswrapper[4663]: E1211 10:20:57.749259 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d71b3f6f-1e97-4471-a823-4e04a0e0c684" containerName="registry-server" Dec 11 10:20:57 crc kubenswrapper[4663]: I1211 10:20:57.749264 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="d71b3f6f-1e97-4471-a823-4e04a0e0c684" containerName="registry-server" Dec 11 10:20:57 crc kubenswrapper[4663]: I1211 10:20:57.749347 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6c8ac96-598d-4a04-845d-84ed9cf435cf" containerName="registry-server" Dec 11 10:20:57 crc 
kubenswrapper[4663]: I1211 10:20:57.749357 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="217976cb-9363-4d97-ba48-2716b0ceac99" containerName="pruner" Dec 11 10:20:57 crc kubenswrapper[4663]: I1211 10:20:57.749364 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="d71b3f6f-1e97-4471-a823-4e04a0e0c684" containerName="registry-server" Dec 11 10:20:57 crc kubenswrapper[4663]: I1211 10:20:57.749376 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0d64b13-fea1-428a-a013-5f48ee4b93b7" containerName="registry-server" Dec 11 10:20:57 crc kubenswrapper[4663]: I1211 10:20:57.749758 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 11 10:20:57 crc kubenswrapper[4663]: I1211 10:20:57.752067 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 11 10:20:57 crc kubenswrapper[4663]: I1211 10:20:57.752062 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 11 10:20:57 crc kubenswrapper[4663]: I1211 10:20:57.755864 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 11 10:20:57 crc kubenswrapper[4663]: I1211 10:20:57.871414 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d500c17b-98ef-4a99-af87-a9cc9db60f04-kubelet-dir\") pod \"installer-9-crc\" (UID: \"d500c17b-98ef-4a99-af87-a9cc9db60f04\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 11 10:20:57 crc kubenswrapper[4663]: I1211 10:20:57.871520 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/d500c17b-98ef-4a99-af87-a9cc9db60f04-var-lock\") pod \"installer-9-crc\" (UID: \"d500c17b-98ef-4a99-af87-a9cc9db60f04\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 11 10:20:57 crc kubenswrapper[4663]: I1211 10:20:57.871788 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d500c17b-98ef-4a99-af87-a9cc9db60f04-kube-api-access\") pod \"installer-9-crc\" (UID: \"d500c17b-98ef-4a99-af87-a9cc9db60f04\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 11 10:20:57 crc kubenswrapper[4663]: I1211 10:20:57.973551 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d500c17b-98ef-4a99-af87-a9cc9db60f04-kube-api-access\") pod \"installer-9-crc\" (UID: \"d500c17b-98ef-4a99-af87-a9cc9db60f04\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 11 10:20:57 crc kubenswrapper[4663]: I1211 10:20:57.973613 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d500c17b-98ef-4a99-af87-a9cc9db60f04-kubelet-dir\") pod \"installer-9-crc\" (UID: \"d500c17b-98ef-4a99-af87-a9cc9db60f04\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 11 10:20:57 crc kubenswrapper[4663]: I1211 10:20:57.973634 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/d500c17b-98ef-4a99-af87-a9cc9db60f04-var-lock\") pod \"installer-9-crc\" (UID: \"d500c17b-98ef-4a99-af87-a9cc9db60f04\") " 
pod="openshift-kube-apiserver/installer-9-crc" Dec 11 10:20:57 crc kubenswrapper[4663]: I1211 10:20:57.973710 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/d500c17b-98ef-4a99-af87-a9cc9db60f04-var-lock\") pod \"installer-9-crc\" (UID: \"d500c17b-98ef-4a99-af87-a9cc9db60f04\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 11 10:20:57 crc kubenswrapper[4663]: I1211 10:20:57.973747 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d500c17b-98ef-4a99-af87-a9cc9db60f04-kubelet-dir\") pod \"installer-9-crc\" (UID: \"d500c17b-98ef-4a99-af87-a9cc9db60f04\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 11 10:20:57 crc kubenswrapper[4663]: I1211 10:20:57.988801 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d500c17b-98ef-4a99-af87-a9cc9db60f04-kube-api-access\") pod \"installer-9-crc\" (UID: \"d500c17b-98ef-4a99-af87-a9cc9db60f04\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 11 10:20:58 crc kubenswrapper[4663]: I1211 10:20:58.062477 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 11 10:20:58 crc kubenswrapper[4663]: I1211 10:20:58.226520 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 11 10:20:58 crc kubenswrapper[4663]: I1211 10:20:58.950000 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"d500c17b-98ef-4a99-af87-a9cc9db60f04","Type":"ContainerStarted","Data":"f3aea3b2db6aa0731d8e4d0a6e30afe64bb3255ea97ea8b5416a42b4f6dad19f"} Dec 11 10:20:58 crc kubenswrapper[4663]: I1211 10:20:58.950340 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"d500c17b-98ef-4a99-af87-a9cc9db60f04","Type":"ContainerStarted","Data":"030c1b2d9020fc4c6acfd8952eceb7526972fdc47d76a0884d90280dbad38c5a"} Dec 11 10:20:58 crc kubenswrapper[4663]: I1211 10:20:58.961651 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=1.96163355 podStartE2EDuration="1.96163355s" podCreationTimestamp="2025-12-11 10:20:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:20:58.961008512 +0000 UTC m=+189.778938296" watchObservedRunningTime="2025-12-11 10:20:58.96163355 +0000 UTC m=+189.779563334" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.532119 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-cgckf" podUID="a4cc9117-6286-44ba-a590-893cff694a7c" containerName="oauth-openshift" containerID="cri-o://9e2a2e59c6a1836cca4f934d9e66d29c61df22777db4150014f7e6c9d168aa0d" gracePeriod=15 Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.801050 4663 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-cgckf" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.823662 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-75d9c4b86c-2bvzv"] Dec 11 10:21:18 crc kubenswrapper[4663]: E1211 10:21:18.823923 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4cc9117-6286-44ba-a590-893cff694a7c" containerName="oauth-openshift" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.823942 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4cc9117-6286-44ba-a590-893cff694a7c" containerName="oauth-openshift" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.824039 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="a4cc9117-6286-44ba-a590-893cff694a7c" containerName="oauth-openshift" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.824413 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-75d9c4b86c-2bvzv" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.837212 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-75d9c4b86c-2bvzv"] Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.852394 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-59clw\" (UniqueName: \"kubernetes.io/projected/c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8-kube-api-access-59clw\") pod \"oauth-openshift-75d9c4b86c-2bvzv\" (UID: \"c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8\") " pod="openshift-authentication/oauth-openshift-75d9c4b86c-2bvzv" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.852445 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8-v4-0-config-system-session\") pod \"oauth-openshift-75d9c4b86c-2bvzv\" (UID: \"c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8\") " pod="openshift-authentication/oauth-openshift-75d9c4b86c-2bvzv" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.852479 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-75d9c4b86c-2bvzv\" (UID: \"c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8\") " pod="openshift-authentication/oauth-openshift-75d9c4b86c-2bvzv" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.852547 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8-v4-0-config-user-template-error\") pod \"oauth-openshift-75d9c4b86c-2bvzv\" (UID: \"c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8\") " pod="openshift-authentication/oauth-openshift-75d9c4b86c-2bvzv" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.852575 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8-audit-dir\") pod \"oauth-openshift-75d9c4b86c-2bvzv\" (UID: \"c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8\") " pod="openshift-authentication/oauth-openshift-75d9c4b86c-2bvzv" Dec 11 10:21:18 crc kubenswrapper[4663]: 
I1211 10:21:18.852608 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8-v4-0-config-system-cliconfig\") pod \"oauth-openshift-75d9c4b86c-2bvzv\" (UID: \"c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8\") " pod="openshift-authentication/oauth-openshift-75d9c4b86c-2bvzv" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.852703 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8-v4-0-config-system-router-certs\") pod \"oauth-openshift-75d9c4b86c-2bvzv\" (UID: \"c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8\") " pod="openshift-authentication/oauth-openshift-75d9c4b86c-2bvzv" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.852733 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-75d9c4b86c-2bvzv\" (UID: \"c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8\") " pod="openshift-authentication/oauth-openshift-75d9c4b86c-2bvzv" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.852758 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8-v4-0-config-user-template-login\") pod \"oauth-openshift-75d9c4b86c-2bvzv\" (UID: \"c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8\") " pod="openshift-authentication/oauth-openshift-75d9c4b86c-2bvzv" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.852885 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8-v4-0-config-system-service-ca\") pod \"oauth-openshift-75d9c4b86c-2bvzv\" (UID: \"c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8\") " pod="openshift-authentication/oauth-openshift-75d9c4b86c-2bvzv" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.852955 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8-v4-0-config-system-serving-cert\") pod \"oauth-openshift-75d9c4b86c-2bvzv\" (UID: \"c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8\") " pod="openshift-authentication/oauth-openshift-75d9c4b86c-2bvzv" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.853020 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8-audit-policies\") pod \"oauth-openshift-75d9c4b86c-2bvzv\" (UID: \"c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8\") " pod="openshift-authentication/oauth-openshift-75d9c4b86c-2bvzv" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.853082 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-75d9c4b86c-2bvzv\" (UID: 
\"c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8\") " pod="openshift-authentication/oauth-openshift-75d9c4b86c-2bvzv" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.853109 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-75d9c4b86c-2bvzv\" (UID: \"c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8\") " pod="openshift-authentication/oauth-openshift-75d9c4b86c-2bvzv" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.953747 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-user-template-error\") pod \"a4cc9117-6286-44ba-a590-893cff694a7c\" (UID: \"a4cc9117-6286-44ba-a590-893cff694a7c\") " Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.953785 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-user-idp-0-file-data\") pod \"a4cc9117-6286-44ba-a590-893cff694a7c\" (UID: \"a4cc9117-6286-44ba-a590-893cff694a7c\") " Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.953811 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-system-session\") pod \"a4cc9117-6286-44ba-a590-893cff694a7c\" (UID: \"a4cc9117-6286-44ba-a590-893cff694a7c\") " Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.953839 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a4cc9117-6286-44ba-a590-893cff694a7c-audit-policies\") pod \"a4cc9117-6286-44ba-a590-893cff694a7c\" (UID: \"a4cc9117-6286-44ba-a590-893cff694a7c\") " Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.953857 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-system-ocp-branding-template\") pod \"a4cc9117-6286-44ba-a590-893cff694a7c\" (UID: \"a4cc9117-6286-44ba-a590-893cff694a7c\") " Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.953894 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-system-serving-cert\") pod \"a4cc9117-6286-44ba-a590-893cff694a7c\" (UID: \"a4cc9117-6286-44ba-a590-893cff694a7c\") " Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.953961 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-user-template-provider-selection\") pod \"a4cc9117-6286-44ba-a590-893cff694a7c\" (UID: \"a4cc9117-6286-44ba-a590-893cff694a7c\") " Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.954017 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-system-trusted-ca-bundle\") pod \"a4cc9117-6286-44ba-a590-893cff694a7c\" (UID: \"a4cc9117-6286-44ba-a590-893cff694a7c\") " Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.954034 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a4cc9117-6286-44ba-a590-893cff694a7c-audit-dir\") pod \"a4cc9117-6286-44ba-a590-893cff694a7c\" (UID: \"a4cc9117-6286-44ba-a590-893cff694a7c\") " Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.954051 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-system-router-certs\") pod \"a4cc9117-6286-44ba-a590-893cff694a7c\" (UID: \"a4cc9117-6286-44ba-a590-893cff694a7c\") " Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.954079 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-user-template-login\") pod \"a4cc9117-6286-44ba-a590-893cff694a7c\" (UID: \"a4cc9117-6286-44ba-a590-893cff694a7c\") " Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.954095 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-system-service-ca\") pod \"a4cc9117-6286-44ba-a590-893cff694a7c\" (UID: \"a4cc9117-6286-44ba-a590-893cff694a7c\") " Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.954113 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9g9k\" (UniqueName: \"kubernetes.io/projected/a4cc9117-6286-44ba-a590-893cff694a7c-kube-api-access-w9g9k\") pod \"a4cc9117-6286-44ba-a590-893cff694a7c\" (UID: \"a4cc9117-6286-44ba-a590-893cff694a7c\") " Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.954149 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-system-cliconfig\") pod \"a4cc9117-6286-44ba-a590-893cff694a7c\" (UID: \"a4cc9117-6286-44ba-a590-893cff694a7c\") " Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.954284 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8-v4-0-config-system-router-certs\") pod \"oauth-openshift-75d9c4b86c-2bvzv\" (UID: \"c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8\") " pod="openshift-authentication/oauth-openshift-75d9c4b86c-2bvzv" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.954310 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-75d9c4b86c-2bvzv\" (UID: \"c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8\") " pod="openshift-authentication/oauth-openshift-75d9c4b86c-2bvzv" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.954334 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: 
\"kubernetes.io/secret/c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8-v4-0-config-user-template-login\") pod \"oauth-openshift-75d9c4b86c-2bvzv\" (UID: \"c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8\") " pod="openshift-authentication/oauth-openshift-75d9c4b86c-2bvzv" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.954363 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8-v4-0-config-system-serving-cert\") pod \"oauth-openshift-75d9c4b86c-2bvzv\" (UID: \"c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8\") " pod="openshift-authentication/oauth-openshift-75d9c4b86c-2bvzv" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.954380 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8-v4-0-config-system-service-ca\") pod \"oauth-openshift-75d9c4b86c-2bvzv\" (UID: \"c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8\") " pod="openshift-authentication/oauth-openshift-75d9c4b86c-2bvzv" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.954383 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a4cc9117-6286-44ba-a590-893cff694a7c-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "a4cc9117-6286-44ba-a590-893cff694a7c" (UID: "a4cc9117-6286-44ba-a590-893cff694a7c"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.954414 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8-audit-policies\") pod \"oauth-openshift-75d9c4b86c-2bvzv\" (UID: \"c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8\") " pod="openshift-authentication/oauth-openshift-75d9c4b86c-2bvzv" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.954440 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-75d9c4b86c-2bvzv\" (UID: \"c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8\") " pod="openshift-authentication/oauth-openshift-75d9c4b86c-2bvzv" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.954457 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-75d9c4b86c-2bvzv\" (UID: \"c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8\") " pod="openshift-authentication/oauth-openshift-75d9c4b86c-2bvzv" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.954478 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-59clw\" (UniqueName: \"kubernetes.io/projected/c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8-kube-api-access-59clw\") pod \"oauth-openshift-75d9c4b86c-2bvzv\" (UID: \"c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8\") " pod="openshift-authentication/oauth-openshift-75d9c4b86c-2bvzv" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.954496 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: 
\"kubernetes.io/secret/c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8-v4-0-config-system-session\") pod \"oauth-openshift-75d9c4b86c-2bvzv\" (UID: \"c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8\") " pod="openshift-authentication/oauth-openshift-75d9c4b86c-2bvzv" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.954519 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-75d9c4b86c-2bvzv\" (UID: \"c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8\") " pod="openshift-authentication/oauth-openshift-75d9c4b86c-2bvzv" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.954548 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8-v4-0-config-user-template-error\") pod \"oauth-openshift-75d9c4b86c-2bvzv\" (UID: \"c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8\") " pod="openshift-authentication/oauth-openshift-75d9c4b86c-2bvzv" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.954572 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8-audit-dir\") pod \"oauth-openshift-75d9c4b86c-2bvzv\" (UID: \"c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8\") " pod="openshift-authentication/oauth-openshift-75d9c4b86c-2bvzv" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.954600 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8-v4-0-config-system-cliconfig\") pod \"oauth-openshift-75d9c4b86c-2bvzv\" (UID: \"c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8\") " pod="openshift-authentication/oauth-openshift-75d9c4b86c-2bvzv" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.954646 4663 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a4cc9117-6286-44ba-a590-893cff694a7c-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.954748 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a4cc9117-6286-44ba-a590-893cff694a7c-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "a4cc9117-6286-44ba-a590-893cff694a7c" (UID: "a4cc9117-6286-44ba-a590-893cff694a7c"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.954783 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "a4cc9117-6286-44ba-a590-893cff694a7c" (UID: "a4cc9117-6286-44ba-a590-893cff694a7c"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.955531 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8-v4-0-config-system-cliconfig\") pod \"oauth-openshift-75d9c4b86c-2bvzv\" (UID: \"c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8\") " pod="openshift-authentication/oauth-openshift-75d9c4b86c-2bvzv" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.955832 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-75d9c4b86c-2bvzv\" (UID: \"c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8\") " pod="openshift-authentication/oauth-openshift-75d9c4b86c-2bvzv" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.956318 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8-v4-0-config-system-service-ca\") pod \"oauth-openshift-75d9c4b86c-2bvzv\" (UID: \"c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8\") " pod="openshift-authentication/oauth-openshift-75d9c4b86c-2bvzv" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.959543 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-75d9c4b86c-2bvzv\" (UID: \"c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8\") " pod="openshift-authentication/oauth-openshift-75d9c4b86c-2bvzv" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.959719 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8-v4-0-config-user-template-error\") pod \"oauth-openshift-75d9c4b86c-2bvzv\" (UID: \"c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8\") " pod="openshift-authentication/oauth-openshift-75d9c4b86c-2bvzv" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.959864 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8-v4-0-config-system-router-certs\") pod \"oauth-openshift-75d9c4b86c-2bvzv\" (UID: \"c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8\") " pod="openshift-authentication/oauth-openshift-75d9c4b86c-2bvzv" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.960023 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8-v4-0-config-system-session\") pod \"oauth-openshift-75d9c4b86c-2bvzv\" (UID: \"c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8\") " pod="openshift-authentication/oauth-openshift-75d9c4b86c-2bvzv" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.960082 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8-audit-dir\") pod \"oauth-openshift-75d9c4b86c-2bvzv\" (UID: \"c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8\") " pod="openshift-authentication/oauth-openshift-75d9c4b86c-2bvzv" Dec 11 10:21:18 crc 
kubenswrapper[4663]: I1211 10:21:18.960079 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8-audit-policies\") pod \"oauth-openshift-75d9c4b86c-2bvzv\" (UID: \"c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8\") " pod="openshift-authentication/oauth-openshift-75d9c4b86c-2bvzv" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.960251 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "a4cc9117-6286-44ba-a590-893cff694a7c" (UID: "a4cc9117-6286-44ba-a590-893cff694a7c"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.960597 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "a4cc9117-6286-44ba-a590-893cff694a7c" (UID: "a4cc9117-6286-44ba-a590-893cff694a7c"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.960808 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "a4cc9117-6286-44ba-a590-893cff694a7c" (UID: "a4cc9117-6286-44ba-a590-893cff694a7c"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.961160 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "a4cc9117-6286-44ba-a590-893cff694a7c" (UID: "a4cc9117-6286-44ba-a590-893cff694a7c"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.961195 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "a4cc9117-6286-44ba-a590-893cff694a7c" (UID: "a4cc9117-6286-44ba-a590-893cff694a7c"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.961523 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "a4cc9117-6286-44ba-a590-893cff694a7c" (UID: "a4cc9117-6286-44ba-a590-893cff694a7c"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.961693 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "a4cc9117-6286-44ba-a590-893cff694a7c" (UID: "a4cc9117-6286-44ba-a590-893cff694a7c"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.962028 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "a4cc9117-6286-44ba-a590-893cff694a7c" (UID: "a4cc9117-6286-44ba-a590-893cff694a7c"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.962446 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "a4cc9117-6286-44ba-a590-893cff694a7c" (UID: "a4cc9117-6286-44ba-a590-893cff694a7c"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.962606 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "a4cc9117-6286-44ba-a590-893cff694a7c" (UID: "a4cc9117-6286-44ba-a590-893cff694a7c"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.962661 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-75d9c4b86c-2bvzv\" (UID: \"c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8\") " pod="openshift-authentication/oauth-openshift-75d9c4b86c-2bvzv" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.963149 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-75d9c4b86c-2bvzv\" (UID: \"c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8\") " pod="openshift-authentication/oauth-openshift-75d9c4b86c-2bvzv" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.963422 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a4cc9117-6286-44ba-a590-893cff694a7c-kube-api-access-w9g9k" (OuterVolumeSpecName: "kube-api-access-w9g9k") pod "a4cc9117-6286-44ba-a590-893cff694a7c" (UID: "a4cc9117-6286-44ba-a590-893cff694a7c"). InnerVolumeSpecName "kube-api-access-w9g9k". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.963561 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8-v4-0-config-user-template-login\") pod \"oauth-openshift-75d9c4b86c-2bvzv\" (UID: \"c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8\") " pod="openshift-authentication/oauth-openshift-75d9c4b86c-2bvzv" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.963709 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8-v4-0-config-system-serving-cert\") pod \"oauth-openshift-75d9c4b86c-2bvzv\" (UID: \"c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8\") " pod="openshift-authentication/oauth-openshift-75d9c4b86c-2bvzv" Dec 11 10:21:18 crc kubenswrapper[4663]: I1211 10:21:18.968417 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-59clw\" (UniqueName: \"kubernetes.io/projected/c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8-kube-api-access-59clw\") pod \"oauth-openshift-75d9c4b86c-2bvzv\" (UID: \"c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8\") " pod="openshift-authentication/oauth-openshift-75d9c4b86c-2bvzv" Dec 11 10:21:19 crc kubenswrapper[4663]: I1211 10:21:19.055559 4663 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 11 10:21:19 crc kubenswrapper[4663]: I1211 10:21:19.055684 4663 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 10:21:19 crc kubenswrapper[4663]: I1211 10:21:19.055700 4663 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 11 10:21:19 crc kubenswrapper[4663]: I1211 10:21:19.055711 4663 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 11 10:21:19 crc kubenswrapper[4663]: I1211 10:21:19.055722 4663 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 11 10:21:19 crc kubenswrapper[4663]: I1211 10:21:19.055731 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9g9k\" (UniqueName: \"kubernetes.io/projected/a4cc9117-6286-44ba-a590-893cff694a7c-kube-api-access-w9g9k\") on node \"crc\" DevicePath \"\"" Dec 11 10:21:19 crc kubenswrapper[4663]: I1211 10:21:19.055740 4663 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 11 10:21:19 crc kubenswrapper[4663]: I1211 10:21:19.055750 4663 reconciler_common.go:293] 
"Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 11 10:21:19 crc kubenswrapper[4663]: I1211 10:21:19.055758 4663 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 11 10:21:19 crc kubenswrapper[4663]: I1211 10:21:19.055766 4663 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 11 10:21:19 crc kubenswrapper[4663]: I1211 10:21:19.055774 4663 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 11 10:21:19 crc kubenswrapper[4663]: I1211 10:21:19.055783 4663 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a4cc9117-6286-44ba-a590-893cff694a7c-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 11 10:21:19 crc kubenswrapper[4663]: I1211 10:21:19.055792 4663 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/a4cc9117-6286-44ba-a590-893cff694a7c-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 10:21:19 crc kubenswrapper[4663]: I1211 10:21:19.061567 4663 generic.go:334] "Generic (PLEG): container finished" podID="a4cc9117-6286-44ba-a590-893cff694a7c" containerID="9e2a2e59c6a1836cca4f934d9e66d29c61df22777db4150014f7e6c9d168aa0d" exitCode=0 Dec 11 10:21:19 crc kubenswrapper[4663]: I1211 10:21:19.061604 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-cgckf" event={"ID":"a4cc9117-6286-44ba-a590-893cff694a7c","Type":"ContainerDied","Data":"9e2a2e59c6a1836cca4f934d9e66d29c61df22777db4150014f7e6c9d168aa0d"} Dec 11 10:21:19 crc kubenswrapper[4663]: I1211 10:21:19.061639 4663 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-cgckf" Dec 11 10:21:19 crc kubenswrapper[4663]: I1211 10:21:19.061655 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-cgckf" event={"ID":"a4cc9117-6286-44ba-a590-893cff694a7c","Type":"ContainerDied","Data":"03f968a22326f30ba6ce154bc405c4ab4d6daeefb8d9bf488dbd5677609e702d"} Dec 11 10:21:19 crc kubenswrapper[4663]: I1211 10:21:19.061676 4663 scope.go:117] "RemoveContainer" containerID="9e2a2e59c6a1836cca4f934d9e66d29c61df22777db4150014f7e6c9d168aa0d" Dec 11 10:21:19 crc kubenswrapper[4663]: I1211 10:21:19.075851 4663 scope.go:117] "RemoveContainer" containerID="9e2a2e59c6a1836cca4f934d9e66d29c61df22777db4150014f7e6c9d168aa0d" Dec 11 10:21:19 crc kubenswrapper[4663]: E1211 10:21:19.076334 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9e2a2e59c6a1836cca4f934d9e66d29c61df22777db4150014f7e6c9d168aa0d\": container with ID starting with 9e2a2e59c6a1836cca4f934d9e66d29c61df22777db4150014f7e6c9d168aa0d not found: ID does not exist" containerID="9e2a2e59c6a1836cca4f934d9e66d29c61df22777db4150014f7e6c9d168aa0d" Dec 11 10:21:19 crc kubenswrapper[4663]: I1211 10:21:19.076364 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e2a2e59c6a1836cca4f934d9e66d29c61df22777db4150014f7e6c9d168aa0d"} err="failed to get container status \"9e2a2e59c6a1836cca4f934d9e66d29c61df22777db4150014f7e6c9d168aa0d\": rpc error: code = NotFound desc = could not find container \"9e2a2e59c6a1836cca4f934d9e66d29c61df22777db4150014f7e6c9d168aa0d\": container with ID starting with 9e2a2e59c6a1836cca4f934d9e66d29c61df22777db4150014f7e6c9d168aa0d not found: ID does not exist" Dec 11 10:21:19 crc kubenswrapper[4663]: I1211 10:21:19.082775 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-cgckf"] Dec 11 10:21:19 crc kubenswrapper[4663]: I1211 10:21:19.087723 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-cgckf"] Dec 11 10:21:19 crc kubenswrapper[4663]: I1211 10:21:19.141924 4663 util.go:30] "No sandbox for pod can be found. 
Dec 11 10:21:19 crc kubenswrapper[4663]: I1211 10:21:19.271222 4663 patch_prober.go:28] interesting pod/machine-config-daemon-mq524 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 11 10:21:19 crc kubenswrapper[4663]: I1211 10:21:19.271284 4663 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 11 10:21:19 crc kubenswrapper[4663]: I1211 10:21:19.271416 4663 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mq524"
Dec 11 10:21:19 crc kubenswrapper[4663]: I1211 10:21:19.272657 4663 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0ed2238a7f3178a2c325e0f77a94934af6c907c13d0340e7f4c67b744669b56b"} pod="openshift-machine-config-operator/machine-config-daemon-mq524" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 11 10:21:19 crc kubenswrapper[4663]: I1211 10:21:19.272724 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13" containerName="machine-config-daemon" containerID="cri-o://0ed2238a7f3178a2c325e0f77a94934af6c907c13d0340e7f4c67b744669b56b" gracePeriod=600
Dec 11 10:21:19 crc kubenswrapper[4663]: I1211 10:21:19.278981 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-75d9c4b86c-2bvzv"]
Dec 11 10:21:19 crc kubenswrapper[4663]: I1211 10:21:19.510110 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a4cc9117-6286-44ba-a590-893cff694a7c" path="/var/lib/kubelet/pods/a4cc9117-6286-44ba-a590-893cff694a7c/volumes"
Dec 11 10:21:20 crc kubenswrapper[4663]: I1211 10:21:20.068586 4663 generic.go:334] "Generic (PLEG): container finished" podID="98941020-2764-490d-81db-9decd21aaa13" containerID="0ed2238a7f3178a2c325e0f77a94934af6c907c13d0340e7f4c67b744669b56b" exitCode=0
Dec 11 10:21:20 crc kubenswrapper[4663]: I1211 10:21:20.068633 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" event={"ID":"98941020-2764-490d-81db-9decd21aaa13","Type":"ContainerDied","Data":"0ed2238a7f3178a2c325e0f77a94934af6c907c13d0340e7f4c67b744669b56b"}
Dec 11 10:21:20 crc kubenswrapper[4663]: I1211 10:21:20.068785 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" event={"ID":"98941020-2764-490d-81db-9decd21aaa13","Type":"ContainerStarted","Data":"894133a128575857e18fd9f49a2fe8c20c1e724cdefaa6dd3b25402ad2d9e35b"}
Dec 11 10:21:20 crc kubenswrapper[4663]: I1211 10:21:20.069830 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-75d9c4b86c-2bvzv" event={"ID":"c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8","Type":"ContainerStarted","Data":"0a02026756212826eb5339861053a740c911b047838e0d4c544bf05c0e61ca67"}
Dec 11 10:21:20 crc kubenswrapper[4663]: I1211 10:21:20.069875 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-75d9c4b86c-2bvzv" event={"ID":"c4a144f1-ec1a-4b4d-ad05-3daa0b9d15c8","Type":"ContainerStarted","Data":"d09ee84b0059a0b7386f79aab338e0a30454ed040fdeaeb5637a5adc98f24f82"}
Dec 11 10:21:20 crc kubenswrapper[4663]: I1211 10:21:20.070016 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-75d9c4b86c-2bvzv"
Dec 11 10:21:20 crc kubenswrapper[4663]: I1211 10:21:20.074542 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-75d9c4b86c-2bvzv"
Dec 11 10:21:20 crc kubenswrapper[4663]: I1211 10:21:20.096236 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-75d9c4b86c-2bvzv" podStartSLOduration=27.096222584 podStartE2EDuration="27.096222584s" podCreationTimestamp="2025-12-11 10:20:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:21:20.094029773 +0000 UTC m=+210.911959557" watchObservedRunningTime="2025-12-11 10:21:20.096222584 +0000 UTC m=+210.914152359"
Dec 11 10:21:36 crc kubenswrapper[4663]: I1211 10:21:36.120135 4663 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Dec 11 10:21:36 crc kubenswrapper[4663]: I1211 10:21:36.121137 4663 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"]
Dec 11 10:21:36 crc kubenswrapper[4663]: I1211 10:21:36.121323 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 11 10:21:36 crc kubenswrapper[4663]: I1211 10:21:36.121476 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://ba7f98a60c12531c6766887ffac909316494552cc1acb036c5609e6400ac5b55" gracePeriod=15
Dec 11 10:21:36 crc kubenswrapper[4663]: I1211 10:21:36.121622 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://a89c5b395c04b94c4476db7bf1edcfe175fb552dbbf4145a7031c0f86e139e9f" gracePeriod=15
Dec 11 10:21:36 crc kubenswrapper[4663]: I1211 10:21:36.121664 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://83cef2539e49e0251b7cdab06b7a9002344f7ee41150f84b3f648372b4d50b55" gracePeriod=15
Dec 11 10:21:36 crc kubenswrapper[4663]: I1211 10:21:36.121669 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://5d9bf7e6f64790700d3765c1935fa7eb13fa839a96617117bb226a8b86090fe9" gracePeriod=15
Dec 11 10:21:36 crc kubenswrapper[4663]: I1211 10:21:36.121726 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://2c1a0dcfabc032353060f8cb94fc8c601da6b20a3e2c35b84229bf66c9256537" gracePeriod=15
Dec 11 10:21:36 crc kubenswrapper[4663]: I1211 10:21:36.122015 4663 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"]
Dec 11 10:21:36 crc kubenswrapper[4663]: E1211 10:21:36.122173 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 11 10:21:36 crc kubenswrapper[4663]: I1211 10:21:36.122199 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 11 10:21:36 crc kubenswrapper[4663]: E1211 10:21:36.122208 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup"
Dec 11 10:21:36 crc kubenswrapper[4663]: I1211 10:21:36.122213 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup"
Dec 11 10:21:36 crc kubenswrapper[4663]: E1211 10:21:36.122221 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller"
Dec 11 10:21:36 crc kubenswrapper[4663]: I1211 10:21:36.122227 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller"
Dec 11 10:21:36 crc kubenswrapper[4663]: E1211 10:21:36.122236 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer"
Dec 11 10:21:36 crc kubenswrapper[4663]: I1211 10:21:36.122242 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer"
Dec 11 10:21:36 crc kubenswrapper[4663]: E1211 10:21:36.122249 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 11 10:21:36 crc kubenswrapper[4663]: I1211 10:21:36.122255 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 11 10:21:36 crc kubenswrapper[4663]: E1211 10:21:36.122268 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver"
Dec 11 10:21:36 crc kubenswrapper[4663]: I1211 10:21:36.122274 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver"
Dec 11 10:21:36 crc kubenswrapper[4663]: E1211 10:21:36.122282 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz"
Dec 11 10:21:36 crc kubenswrapper[4663]: I1211 10:21:36.122289 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz"
Dec 11 10:21:36 crc kubenswrapper[4663]: I1211 10:21:36.122362 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver"
Dec 11 10:21:36 crc kubenswrapper[4663]: I1211 10:21:36.122373 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 11 10:21:36 crc kubenswrapper[4663]: I1211 10:21:36.122384 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer"
Dec 11 10:21:36 crc kubenswrapper[4663]: I1211 10:21:36.122391 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller"
Dec 11 10:21:36 crc kubenswrapper[4663]: I1211 10:21:36.122400 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz"
Dec 11 10:21:36 crc kubenswrapper[4663]: I1211 10:21:36.122563 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 11 10:21:36 crc kubenswrapper[4663]: I1211 10:21:36.145511 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 11 10:21:36 crc kubenswrapper[4663]: I1211 10:21:36.145559 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 11 10:21:36 crc kubenswrapper[4663]: I1211 10:21:36.145602 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 11 10:21:36 crc kubenswrapper[4663]: I1211 10:21:36.145632 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 11 10:21:36 crc kubenswrapper[4663]: I1211 10:21:36.145655 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 11 10:21:36 crc kubenswrapper[4663]: I1211 10:21:36.145671 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 11 10:21:36 crc kubenswrapper[4663]: I1211 10:21:36.145689 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 11 10:21:36 crc kubenswrapper[4663]: I1211 10:21:36.145770 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 11 10:21:36 crc kubenswrapper[4663]: I1211 10:21:36.149054 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Dec 11 10:21:36 crc kubenswrapper[4663]: I1211 10:21:36.247041 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 11 10:21:36 crc kubenswrapper[4663]: I1211 10:21:36.247251 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 11 10:21:36 crc kubenswrapper[4663]: I1211 10:21:36.247279 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 11 10:21:36 crc kubenswrapper[4663]: I1211 10:21:36.247254 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 11 10:21:36 crc kubenswrapper[4663]: I1211 10:21:36.247294 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 11 10:21:36 crc kubenswrapper[4663]: I1211 10:21:36.247315 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 11 10:21:36 crc kubenswrapper[4663]: I1211 10:21:36.247348 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 11 10:21:36 crc kubenswrapper[4663]: I1211 10:21:36.247380 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 11 10:21:36 crc kubenswrapper[4663]: I1211 10:21:36.247404 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 11 10:21:36 crc kubenswrapper[4663]: I1211 10:21:36.247408 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 11 10:21:36 crc kubenswrapper[4663]: I1211 10:21:36.247429 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 11 10:21:36 crc kubenswrapper[4663]: I1211 10:21:36.247445 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
\"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 10:21:36 crc kubenswrapper[4663]: I1211 10:21:36.247468 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 10:21:36 crc kubenswrapper[4663]: I1211 10:21:36.247520 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 10:21:36 crc kubenswrapper[4663]: I1211 10:21:36.247541 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 10:21:36 crc kubenswrapper[4663]: I1211 10:21:36.247559 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 10:21:36 crc kubenswrapper[4663]: I1211 10:21:36.449686 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 10:21:36 crc kubenswrapper[4663]: W1211 10:21:36.465987 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-0b6a56be276fc130b7ee2c217488aae4f67a8058e5ea66668540cfebb095b135 WatchSource:0}: Error finding container 0b6a56be276fc130b7ee2c217488aae4f67a8058e5ea66668540cfebb095b135: Status 404 returned error can't find the container with id 0b6a56be276fc130b7ee2c217488aae4f67a8058e5ea66668540cfebb095b135 Dec 11 10:21:36 crc kubenswrapper[4663]: E1211 10:21:36.468574 4663 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 192.168.25.89:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.1880220dc537270f openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-11 10:21:36.467879695 +0000 UTC m=+227.285809480,LastTimestamp:2025-12-11 10:21:36.467879695 +0000 UTC m=+227.285809480,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 11 10:21:37 crc kubenswrapper[4663]: I1211 10:21:37.142994 4663 generic.go:334] "Generic (PLEG): container finished" podID="d500c17b-98ef-4a99-af87-a9cc9db60f04" containerID="f3aea3b2db6aa0731d8e4d0a6e30afe64bb3255ea97ea8b5416a42b4f6dad19f" exitCode=0 Dec 11 10:21:37 crc kubenswrapper[4663]: I1211 10:21:37.143073 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"d500c17b-98ef-4a99-af87-a9cc9db60f04","Type":"ContainerDied","Data":"f3aea3b2db6aa0731d8e4d0a6e30afe64bb3255ea97ea8b5416a42b4f6dad19f"} Dec 11 10:21:37 crc kubenswrapper[4663]: I1211 10:21:37.143816 4663 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 11 10:21:37 crc kubenswrapper[4663]: I1211 10:21:37.144156 4663 status_manager.go:851] "Failed to get status for pod" podUID="d500c17b-98ef-4a99-af87-a9cc9db60f04" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 11 10:21:37 crc kubenswrapper[4663]: I1211 10:21:37.144316 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"c7885cbb424652f796c5a1b39ca0729218de783212443ea7aaf706edfb023f6d"} Dec 11 10:21:37 crc kubenswrapper[4663]: 
I1211 10:21:37.144363 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"0b6a56be276fc130b7ee2c217488aae4f67a8058e5ea66668540cfebb095b135"} Dec 11 10:21:37 crc kubenswrapper[4663]: I1211 10:21:37.144406 4663 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 11 10:21:37 crc kubenswrapper[4663]: I1211 10:21:37.144642 4663 status_manager.go:851] "Failed to get status for pod" podUID="d500c17b-98ef-4a99-af87-a9cc9db60f04" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 11 10:21:37 crc kubenswrapper[4663]: I1211 10:21:37.144928 4663 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 11 10:21:37 crc kubenswrapper[4663]: I1211 10:21:37.145327 4663 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 11 10:21:37 crc kubenswrapper[4663]: I1211 10:21:37.146207 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 11 10:21:37 crc kubenswrapper[4663]: I1211 10:21:37.147373 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 11 10:21:37 crc kubenswrapper[4663]: I1211 10:21:37.147969 4663 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="a89c5b395c04b94c4476db7bf1edcfe175fb552dbbf4145a7031c0f86e139e9f" exitCode=0 Dec 11 10:21:37 crc kubenswrapper[4663]: I1211 10:21:37.147990 4663 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="2c1a0dcfabc032353060f8cb94fc8c601da6b20a3e2c35b84229bf66c9256537" exitCode=0 Dec 11 10:21:37 crc kubenswrapper[4663]: I1211 10:21:37.147998 4663 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="5d9bf7e6f64790700d3765c1935fa7eb13fa839a96617117bb226a8b86090fe9" exitCode=0 Dec 11 10:21:37 crc kubenswrapper[4663]: I1211 10:21:37.148005 4663 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="83cef2539e49e0251b7cdab06b7a9002344f7ee41150f84b3f648372b4d50b55" exitCode=2 Dec 11 10:21:37 crc kubenswrapper[4663]: I1211 10:21:37.148043 4663 scope.go:117] "RemoveContainer" 
containerID="0068e2981a8c781e9563c1714bf0be7f35b5510f1fd7e52ab4cc83a0c0ec0c0e" Dec 11 10:21:38 crc kubenswrapper[4663]: I1211 10:21:38.153504 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 11 10:21:38 crc kubenswrapper[4663]: I1211 10:21:38.402201 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 11 10:21:38 crc kubenswrapper[4663]: I1211 10:21:38.403374 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 10:21:38 crc kubenswrapper[4663]: I1211 10:21:38.403768 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 11 10:21:38 crc kubenswrapper[4663]: I1211 10:21:38.404086 4663 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 11 10:21:38 crc kubenswrapper[4663]: I1211 10:21:38.404558 4663 status_manager.go:851] "Failed to get status for pod" podUID="d500c17b-98ef-4a99-af87-a9cc9db60f04" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 11 10:21:38 crc kubenswrapper[4663]: I1211 10:21:38.404998 4663 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 11 10:21:38 crc kubenswrapper[4663]: I1211 10:21:38.405309 4663 status_manager.go:851] "Failed to get status for pod" podUID="d500c17b-98ef-4a99-af87-a9cc9db60f04" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 11 10:21:38 crc kubenswrapper[4663]: I1211 10:21:38.405563 4663 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 11 10:21:38 crc kubenswrapper[4663]: I1211 10:21:38.405814 4663 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 11 10:21:38 crc kubenswrapper[4663]: I1211 10:21:38.470504 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: 
\"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 11 10:21:38 crc kubenswrapper[4663]: I1211 10:21:38.470549 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/d500c17b-98ef-4a99-af87-a9cc9db60f04-var-lock\") pod \"d500c17b-98ef-4a99-af87-a9cc9db60f04\" (UID: \"d500c17b-98ef-4a99-af87-a9cc9db60f04\") " Dec 11 10:21:38 crc kubenswrapper[4663]: I1211 10:21:38.470566 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 11 10:21:38 crc kubenswrapper[4663]: I1211 10:21:38.470593 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d500c17b-98ef-4a99-af87-a9cc9db60f04-kubelet-dir\") pod \"d500c17b-98ef-4a99-af87-a9cc9db60f04\" (UID: \"d500c17b-98ef-4a99-af87-a9cc9db60f04\") " Dec 11 10:21:38 crc kubenswrapper[4663]: I1211 10:21:38.470607 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 11 10:21:38 crc kubenswrapper[4663]: I1211 10:21:38.470650 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d500c17b-98ef-4a99-af87-a9cc9db60f04-kube-api-access\") pod \"d500c17b-98ef-4a99-af87-a9cc9db60f04\" (UID: \"d500c17b-98ef-4a99-af87-a9cc9db60f04\") " Dec 11 10:21:38 crc kubenswrapper[4663]: I1211 10:21:38.470665 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 10:21:38 crc kubenswrapper[4663]: I1211 10:21:38.470738 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 10:21:38 crc kubenswrapper[4663]: I1211 10:21:38.470915 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d500c17b-98ef-4a99-af87-a9cc9db60f04-var-lock" (OuterVolumeSpecName: "var-lock") pod "d500c17b-98ef-4a99-af87-a9cc9db60f04" (UID: "d500c17b-98ef-4a99-af87-a9cc9db60f04"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 10:21:38 crc kubenswrapper[4663]: I1211 10:21:38.470945 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d500c17b-98ef-4a99-af87-a9cc9db60f04-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "d500c17b-98ef-4a99-af87-a9cc9db60f04" (UID: "d500c17b-98ef-4a99-af87-a9cc9db60f04"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 10:21:38 crc kubenswrapper[4663]: I1211 10:21:38.470958 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 10:21:38 crc kubenswrapper[4663]: I1211 10:21:38.471042 4663 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Dec 11 10:21:38 crc kubenswrapper[4663]: I1211 10:21:38.471056 4663 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/d500c17b-98ef-4a99-af87-a9cc9db60f04-var-lock\") on node \"crc\" DevicePath \"\"" Dec 11 10:21:38 crc kubenswrapper[4663]: I1211 10:21:38.471068 4663 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 11 10:21:38 crc kubenswrapper[4663]: I1211 10:21:38.471076 4663 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d500c17b-98ef-4a99-af87-a9cc9db60f04-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 11 10:21:38 crc kubenswrapper[4663]: I1211 10:21:38.471083 4663 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 11 10:21:38 crc kubenswrapper[4663]: I1211 10:21:38.475493 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d500c17b-98ef-4a99-af87-a9cc9db60f04-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "d500c17b-98ef-4a99-af87-a9cc9db60f04" (UID: "d500c17b-98ef-4a99-af87-a9cc9db60f04"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:21:38 crc kubenswrapper[4663]: I1211 10:21:38.572444 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d500c17b-98ef-4a99-af87-a9cc9db60f04-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 11 10:21:39 crc kubenswrapper[4663]: I1211 10:21:39.161108 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 11 10:21:39 crc kubenswrapper[4663]: I1211 10:21:39.161921 4663 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="ba7f98a60c12531c6766887ffac909316494552cc1acb036c5609e6400ac5b55" exitCode=0 Dec 11 10:21:39 crc kubenswrapper[4663]: I1211 10:21:39.162004 4663 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 10:21:39 crc kubenswrapper[4663]: I1211 10:21:39.162084 4663 scope.go:117] "RemoveContainer" containerID="a89c5b395c04b94c4476db7bf1edcfe175fb552dbbf4145a7031c0f86e139e9f" Dec 11 10:21:39 crc kubenswrapper[4663]: I1211 10:21:39.163087 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"d500c17b-98ef-4a99-af87-a9cc9db60f04","Type":"ContainerDied","Data":"030c1b2d9020fc4c6acfd8952eceb7526972fdc47d76a0884d90280dbad38c5a"} Dec 11 10:21:39 crc kubenswrapper[4663]: I1211 10:21:39.163125 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 11 10:21:39 crc kubenswrapper[4663]: I1211 10:21:39.163114 4663 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="030c1b2d9020fc4c6acfd8952eceb7526972fdc47d76a0884d90280dbad38c5a" Dec 11 10:21:39 crc kubenswrapper[4663]: I1211 10:21:39.174313 4663 scope.go:117] "RemoveContainer" containerID="2c1a0dcfabc032353060f8cb94fc8c601da6b20a3e2c35b84229bf66c9256537" Dec 11 10:21:39 crc kubenswrapper[4663]: I1211 10:21:39.176033 4663 status_manager.go:851] "Failed to get status for pod" podUID="d500c17b-98ef-4a99-af87-a9cc9db60f04" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 11 10:21:39 crc kubenswrapper[4663]: I1211 10:21:39.176262 4663 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 11 10:21:39 crc kubenswrapper[4663]: I1211 10:21:39.176422 4663 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 11 10:21:39 crc kubenswrapper[4663]: I1211 10:21:39.176794 4663 status_manager.go:851] "Failed to get status for pod" podUID="d500c17b-98ef-4a99-af87-a9cc9db60f04" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 11 10:21:39 crc kubenswrapper[4663]: I1211 10:21:39.177010 4663 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 11 10:21:39 crc kubenswrapper[4663]: I1211 10:21:39.177251 4663 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 11 10:21:39 crc 
kubenswrapper[4663]: I1211 10:21:39.184956 4663 scope.go:117] "RemoveContainer" containerID="5d9bf7e6f64790700d3765c1935fa7eb13fa839a96617117bb226a8b86090fe9" Dec 11 10:21:39 crc kubenswrapper[4663]: I1211 10:21:39.194423 4663 scope.go:117] "RemoveContainer" containerID="83cef2539e49e0251b7cdab06b7a9002344f7ee41150f84b3f648372b4d50b55" Dec 11 10:21:39 crc kubenswrapper[4663]: I1211 10:21:39.204759 4663 scope.go:117] "RemoveContainer" containerID="ba7f98a60c12531c6766887ffac909316494552cc1acb036c5609e6400ac5b55" Dec 11 10:21:39 crc kubenswrapper[4663]: I1211 10:21:39.219839 4663 scope.go:117] "RemoveContainer" containerID="2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356" Dec 11 10:21:39 crc kubenswrapper[4663]: I1211 10:21:39.234787 4663 scope.go:117] "RemoveContainer" containerID="a89c5b395c04b94c4476db7bf1edcfe175fb552dbbf4145a7031c0f86e139e9f" Dec 11 10:21:39 crc kubenswrapper[4663]: E1211 10:21:39.235068 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a89c5b395c04b94c4476db7bf1edcfe175fb552dbbf4145a7031c0f86e139e9f\": container with ID starting with a89c5b395c04b94c4476db7bf1edcfe175fb552dbbf4145a7031c0f86e139e9f not found: ID does not exist" containerID="a89c5b395c04b94c4476db7bf1edcfe175fb552dbbf4145a7031c0f86e139e9f" Dec 11 10:21:39 crc kubenswrapper[4663]: I1211 10:21:39.235098 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a89c5b395c04b94c4476db7bf1edcfe175fb552dbbf4145a7031c0f86e139e9f"} err="failed to get container status \"a89c5b395c04b94c4476db7bf1edcfe175fb552dbbf4145a7031c0f86e139e9f\": rpc error: code = NotFound desc = could not find container \"a89c5b395c04b94c4476db7bf1edcfe175fb552dbbf4145a7031c0f86e139e9f\": container with ID starting with a89c5b395c04b94c4476db7bf1edcfe175fb552dbbf4145a7031c0f86e139e9f not found: ID does not exist" Dec 11 10:21:39 crc kubenswrapper[4663]: I1211 10:21:39.235123 4663 scope.go:117] "RemoveContainer" containerID="2c1a0dcfabc032353060f8cb94fc8c601da6b20a3e2c35b84229bf66c9256537" Dec 11 10:21:39 crc kubenswrapper[4663]: E1211 10:21:39.235422 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2c1a0dcfabc032353060f8cb94fc8c601da6b20a3e2c35b84229bf66c9256537\": container with ID starting with 2c1a0dcfabc032353060f8cb94fc8c601da6b20a3e2c35b84229bf66c9256537 not found: ID does not exist" containerID="2c1a0dcfabc032353060f8cb94fc8c601da6b20a3e2c35b84229bf66c9256537" Dec 11 10:21:39 crc kubenswrapper[4663]: I1211 10:21:39.235459 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c1a0dcfabc032353060f8cb94fc8c601da6b20a3e2c35b84229bf66c9256537"} err="failed to get container status \"2c1a0dcfabc032353060f8cb94fc8c601da6b20a3e2c35b84229bf66c9256537\": rpc error: code = NotFound desc = could not find container \"2c1a0dcfabc032353060f8cb94fc8c601da6b20a3e2c35b84229bf66c9256537\": container with ID starting with 2c1a0dcfabc032353060f8cb94fc8c601da6b20a3e2c35b84229bf66c9256537 not found: ID does not exist" Dec 11 10:21:39 crc kubenswrapper[4663]: I1211 10:21:39.235484 4663 scope.go:117] "RemoveContainer" containerID="5d9bf7e6f64790700d3765c1935fa7eb13fa839a96617117bb226a8b86090fe9" Dec 11 10:21:39 crc kubenswrapper[4663]: E1211 10:21:39.235741 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"5d9bf7e6f64790700d3765c1935fa7eb13fa839a96617117bb226a8b86090fe9\": container with ID starting with 5d9bf7e6f64790700d3765c1935fa7eb13fa839a96617117bb226a8b86090fe9 not found: ID does not exist" containerID="5d9bf7e6f64790700d3765c1935fa7eb13fa839a96617117bb226a8b86090fe9" Dec 11 10:21:39 crc kubenswrapper[4663]: I1211 10:21:39.235767 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5d9bf7e6f64790700d3765c1935fa7eb13fa839a96617117bb226a8b86090fe9"} err="failed to get container status \"5d9bf7e6f64790700d3765c1935fa7eb13fa839a96617117bb226a8b86090fe9\": rpc error: code = NotFound desc = could not find container \"5d9bf7e6f64790700d3765c1935fa7eb13fa839a96617117bb226a8b86090fe9\": container with ID starting with 5d9bf7e6f64790700d3765c1935fa7eb13fa839a96617117bb226a8b86090fe9 not found: ID does not exist" Dec 11 10:21:39 crc kubenswrapper[4663]: I1211 10:21:39.235783 4663 scope.go:117] "RemoveContainer" containerID="83cef2539e49e0251b7cdab06b7a9002344f7ee41150f84b3f648372b4d50b55" Dec 11 10:21:39 crc kubenswrapper[4663]: E1211 10:21:39.235998 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"83cef2539e49e0251b7cdab06b7a9002344f7ee41150f84b3f648372b4d50b55\": container with ID starting with 83cef2539e49e0251b7cdab06b7a9002344f7ee41150f84b3f648372b4d50b55 not found: ID does not exist" containerID="83cef2539e49e0251b7cdab06b7a9002344f7ee41150f84b3f648372b4d50b55" Dec 11 10:21:39 crc kubenswrapper[4663]: I1211 10:21:39.236020 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"83cef2539e49e0251b7cdab06b7a9002344f7ee41150f84b3f648372b4d50b55"} err="failed to get container status \"83cef2539e49e0251b7cdab06b7a9002344f7ee41150f84b3f648372b4d50b55\": rpc error: code = NotFound desc = could not find container \"83cef2539e49e0251b7cdab06b7a9002344f7ee41150f84b3f648372b4d50b55\": container with ID starting with 83cef2539e49e0251b7cdab06b7a9002344f7ee41150f84b3f648372b4d50b55 not found: ID does not exist" Dec 11 10:21:39 crc kubenswrapper[4663]: I1211 10:21:39.236037 4663 scope.go:117] "RemoveContainer" containerID="ba7f98a60c12531c6766887ffac909316494552cc1acb036c5609e6400ac5b55" Dec 11 10:21:39 crc kubenswrapper[4663]: E1211 10:21:39.236274 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ba7f98a60c12531c6766887ffac909316494552cc1acb036c5609e6400ac5b55\": container with ID starting with ba7f98a60c12531c6766887ffac909316494552cc1acb036c5609e6400ac5b55 not found: ID does not exist" containerID="ba7f98a60c12531c6766887ffac909316494552cc1acb036c5609e6400ac5b55" Dec 11 10:21:39 crc kubenswrapper[4663]: I1211 10:21:39.236301 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba7f98a60c12531c6766887ffac909316494552cc1acb036c5609e6400ac5b55"} err="failed to get container status \"ba7f98a60c12531c6766887ffac909316494552cc1acb036c5609e6400ac5b55\": rpc error: code = NotFound desc = could not find container \"ba7f98a60c12531c6766887ffac909316494552cc1acb036c5609e6400ac5b55\": container with ID starting with ba7f98a60c12531c6766887ffac909316494552cc1acb036c5609e6400ac5b55 not found: ID does not exist" Dec 11 10:21:39 crc kubenswrapper[4663]: I1211 10:21:39.236321 4663 scope.go:117] "RemoveContainer" containerID="2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356" Dec 11 10:21:39 crc 
kubenswrapper[4663]: E1211 10:21:39.236510 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\": container with ID starting with 2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356 not found: ID does not exist" containerID="2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356" Dec 11 10:21:39 crc kubenswrapper[4663]: I1211 10:21:39.236531 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356"} err="failed to get container status \"2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\": rpc error: code = NotFound desc = could not find container \"2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356\": container with ID starting with 2e6bdff8d061ec5045441f67448d895a6c5a332ebe43b24eb8e983bba1c34356 not found: ID does not exist" Dec 11 10:21:39 crc kubenswrapper[4663]: I1211 10:21:39.506530 4663 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 11 10:21:39 crc kubenswrapper[4663]: I1211 10:21:39.507161 4663 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 11 10:21:39 crc kubenswrapper[4663]: I1211 10:21:39.507454 4663 status_manager.go:851] "Failed to get status for pod" podUID="d500c17b-98ef-4a99-af87-a9cc9db60f04" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 11 10:21:39 crc kubenswrapper[4663]: I1211 10:21:39.510631 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Dec 11 10:21:42 crc kubenswrapper[4663]: E1211 10:21:42.278397 4663 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 192.168.25.89:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.1880220dc537270f openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-11 10:21:36.467879695 +0000 UTC m=+227.285809480,LastTimestamp:2025-12-11 10:21:36.467879695 +0000 UTC 
m=+227.285809480,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 11 10:21:44 crc kubenswrapper[4663]: E1211 10:21:44.166125 4663 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 11 10:21:44 crc kubenswrapper[4663]: E1211 10:21:44.166552 4663 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 11 10:21:44 crc kubenswrapper[4663]: E1211 10:21:44.166761 4663 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 11 10:21:44 crc kubenswrapper[4663]: E1211 10:21:44.166948 4663 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 11 10:21:44 crc kubenswrapper[4663]: E1211 10:21:44.167151 4663 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 11 10:21:44 crc kubenswrapper[4663]: I1211 10:21:44.167220 4663 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Dec 11 10:21:44 crc kubenswrapper[4663]: E1211 10:21:44.167433 4663 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.89:6443: connect: connection refused" interval="200ms" Dec 11 10:21:44 crc kubenswrapper[4663]: E1211 10:21:44.368592 4663 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.89:6443: connect: connection refused" interval="400ms" Dec 11 10:21:44 crc kubenswrapper[4663]: E1211 10:21:44.769087 4663 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.89:6443: connect: connection refused" interval="800ms" Dec 11 10:21:45 crc kubenswrapper[4663]: E1211 10:21:45.570153 4663 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.89:6443: connect: connection refused" interval="1.6s" Dec 11 10:21:47 crc kubenswrapper[4663]: E1211 10:21:47.171302 4663 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.89:6443: connect: connection refused" interval="3.2s" Dec 11 10:21:49 crc kubenswrapper[4663]: I1211 10:21:49.506699 4663 
status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 11 10:21:49 crc kubenswrapper[4663]: I1211 10:21:49.508530 4663 status_manager.go:851] "Failed to get status for pod" podUID="d500c17b-98ef-4a99-af87-a9cc9db60f04" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 11 10:21:50 crc kubenswrapper[4663]: E1211 10:21:50.373043 4663 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.89:6443: connect: connection refused" interval="6.4s" Dec 11 10:21:51 crc kubenswrapper[4663]: I1211 10:21:51.218360 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 11 10:21:51 crc kubenswrapper[4663]: I1211 10:21:51.218413 4663 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="52c5a9fef9792d964d1b4505a0e5857652454e088fbc8293524f644afd044507" exitCode=1 Dec 11 10:21:51 crc kubenswrapper[4663]: I1211 10:21:51.218444 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"52c5a9fef9792d964d1b4505a0e5857652454e088fbc8293524f644afd044507"} Dec 11 10:21:51 crc kubenswrapper[4663]: I1211 10:21:51.218828 4663 scope.go:117] "RemoveContainer" containerID="52c5a9fef9792d964d1b4505a0e5857652454e088fbc8293524f644afd044507" Dec 11 10:21:51 crc kubenswrapper[4663]: I1211 10:21:51.219176 4663 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 11 10:21:51 crc kubenswrapper[4663]: I1211 10:21:51.219536 4663 status_manager.go:851] "Failed to get status for pod" podUID="d500c17b-98ef-4a99-af87-a9cc9db60f04" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 11 10:21:51 crc kubenswrapper[4663]: I1211 10:21:51.219836 4663 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 11 10:21:51 crc kubenswrapper[4663]: I1211 10:21:51.504262 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 10:21:51 crc kubenswrapper[4663]: I1211 10:21:51.505050 4663 status_manager.go:851] "Failed to get status for pod" podUID="d500c17b-98ef-4a99-af87-a9cc9db60f04" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 11 10:21:51 crc kubenswrapper[4663]: I1211 10:21:51.505322 4663 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 11 10:21:51 crc kubenswrapper[4663]: I1211 10:21:51.506152 4663 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 11 10:21:51 crc kubenswrapper[4663]: I1211 10:21:51.519100 4663 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="b934597e-ac43-4d21-82b1-c8d4260800e6" Dec 11 10:21:51 crc kubenswrapper[4663]: I1211 10:21:51.519123 4663 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="b934597e-ac43-4d21-82b1-c8d4260800e6" Dec 11 10:21:51 crc kubenswrapper[4663]: E1211 10:21:51.519348 4663 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 10:21:51 crc kubenswrapper[4663]: I1211 10:21:51.519725 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 10:21:52 crc kubenswrapper[4663]: I1211 10:21:52.228532 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 11 10:21:52 crc kubenswrapper[4663]: I1211 10:21:52.229455 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"d2afa96237b3f4dfb8061bebc12d169876c89ea6fec7a0155c0cf85e6d79f86b"} Dec 11 10:21:52 crc kubenswrapper[4663]: I1211 10:21:52.230452 4663 status_manager.go:851] "Failed to get status for pod" podUID="d500c17b-98ef-4a99-af87-a9cc9db60f04" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 11 10:21:52 crc kubenswrapper[4663]: I1211 10:21:52.230735 4663 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 11 10:21:52 crc kubenswrapper[4663]: I1211 10:21:52.231024 4663 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 11 10:21:52 crc kubenswrapper[4663]: I1211 10:21:52.231728 4663 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="c147056791e7378fb39c7ba3e21291a0a4c02645cee137164e688ac66f48552f" exitCode=0 Dec 11 10:21:52 crc kubenswrapper[4663]: I1211 10:21:52.231760 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"c147056791e7378fb39c7ba3e21291a0a4c02645cee137164e688ac66f48552f"} Dec 11 10:21:52 crc kubenswrapper[4663]: I1211 10:21:52.231775 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"0e796cf5c2bbcd1fb7501519b5a4b8a8a046a4ef3b892ffcd2e5ad7077f20696"} Dec 11 10:21:52 crc kubenswrapper[4663]: I1211 10:21:52.231963 4663 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="b934597e-ac43-4d21-82b1-c8d4260800e6" Dec 11 10:21:52 crc kubenswrapper[4663]: I1211 10:21:52.231983 4663 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="b934597e-ac43-4d21-82b1-c8d4260800e6" Dec 11 10:21:52 crc kubenswrapper[4663]: E1211 10:21:52.232229 4663 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 10:21:52 crc 
kubenswrapper[4663]: I1211 10:21:52.232377 4663 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 11 10:21:52 crc kubenswrapper[4663]: I1211 10:21:52.233109 4663 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 11 10:21:52 crc kubenswrapper[4663]: I1211 10:21:52.233671 4663 status_manager.go:851] "Failed to get status for pod" podUID="d500c17b-98ef-4a99-af87-a9cc9db60f04" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 11 10:21:52 crc kubenswrapper[4663]: E1211 10:21:52.280097 4663 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 192.168.25.89:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.1880220dc537270f openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-11 10:21:36.467879695 +0000 UTC m=+227.285809480,LastTimestamp:2025-12-11 10:21:36.467879695 +0000 UTC m=+227.285809480,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 11 10:21:53 crc kubenswrapper[4663]: I1211 10:21:53.239809 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"57a6285ada79e6fbe8cd4126fad8f034ab42bf53ecdb1ee98dab5857a3ff43aa"} Dec 11 10:21:53 crc kubenswrapper[4663]: I1211 10:21:53.240102 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"3c48cd6e3ed68d81c84f4e5905ee27b8b83f0989ff7752b847adee5edadd81e3"} Dec 11 10:21:53 crc kubenswrapper[4663]: I1211 10:21:53.240115 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"0cf296637e53729b3d66dfaac8578cd0c940b9016c9bbd045620c0baff439c32"} Dec 11 10:21:53 crc kubenswrapper[4663]: I1211 10:21:53.240124 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"127cd3b50206f809bf77c1686204db1f7f839408805e82f9f44d721f96380902"} Dec 11 10:21:53 crc kubenswrapper[4663]: I1211 10:21:53.240132 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"c6fef90dcf6857eb94088c531eec873df5bf704245836ba32809b6837591196a"} Dec 11 10:21:53 crc kubenswrapper[4663]: I1211 10:21:53.240364 4663 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="b934597e-ac43-4d21-82b1-c8d4260800e6" Dec 11 10:21:53 crc kubenswrapper[4663]: I1211 10:21:53.240377 4663 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="b934597e-ac43-4d21-82b1-c8d4260800e6" Dec 11 10:21:53 crc kubenswrapper[4663]: I1211 10:21:53.240719 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 10:21:53 crc kubenswrapper[4663]: I1211 10:21:53.689901 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 10:21:56 crc kubenswrapper[4663]: I1211 10:21:56.520941 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 10:21:56 crc kubenswrapper[4663]: I1211 10:21:56.521388 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 10:21:56 crc kubenswrapper[4663]: I1211 10:21:56.526870 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 10:21:58 crc kubenswrapper[4663]: I1211 10:21:58.555819 4663 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 10:21:59 crc kubenswrapper[4663]: I1211 10:21:59.269077 4663 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="b934597e-ac43-4d21-82b1-c8d4260800e6" Dec 11 10:21:59 crc kubenswrapper[4663]: I1211 10:21:59.269114 4663 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="b934597e-ac43-4d21-82b1-c8d4260800e6" Dec 11 10:21:59 crc kubenswrapper[4663]: I1211 10:21:59.272656 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 10:21:59 crc kubenswrapper[4663]: I1211 10:21:59.520696 4663 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="9b46117e-d689-47e0-a334-7e50c6592dbf" Dec 11 10:22:00 crc kubenswrapper[4663]: I1211 10:22:00.274354 4663 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="b934597e-ac43-4d21-82b1-c8d4260800e6" Dec 11 10:22:00 crc kubenswrapper[4663]: I1211 10:22:00.274397 4663 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="b934597e-ac43-4d21-82b1-c8d4260800e6" Dec 11 10:22:00 crc kubenswrapper[4663]: I1211 10:22:00.277068 4663 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" 
podUID="9b46117e-d689-47e0-a334-7e50c6592dbf" Dec 11 10:22:00 crc kubenswrapper[4663]: I1211 10:22:00.903680 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 10:22:00 crc kubenswrapper[4663]: I1211 10:22:00.907434 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 10:22:01 crc kubenswrapper[4663]: I1211 10:22:01.284767 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 10:22:08 crc kubenswrapper[4663]: I1211 10:22:08.863283 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 11 10:22:08 crc kubenswrapper[4663]: I1211 10:22:08.967833 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 11 10:22:09 crc kubenswrapper[4663]: I1211 10:22:09.363746 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 11 10:22:09 crc kubenswrapper[4663]: I1211 10:22:09.469172 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 11 10:22:09 crc kubenswrapper[4663]: I1211 10:22:09.571537 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 11 10:22:09 crc kubenswrapper[4663]: I1211 10:22:09.624224 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 11 10:22:09 crc kubenswrapper[4663]: I1211 10:22:09.628796 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 11 10:22:09 crc kubenswrapper[4663]: I1211 10:22:09.936424 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 11 10:22:09 crc kubenswrapper[4663]: I1211 10:22:09.936681 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 11 10:22:10 crc kubenswrapper[4663]: I1211 10:22:10.178306 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 11 10:22:10 crc kubenswrapper[4663]: I1211 10:22:10.310147 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 11 10:22:10 crc kubenswrapper[4663]: I1211 10:22:10.321837 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 11 10:22:10 crc kubenswrapper[4663]: I1211 10:22:10.680675 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 11 10:22:10 crc kubenswrapper[4663]: I1211 10:22:10.769569 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 11 10:22:10 crc kubenswrapper[4663]: I1211 10:22:10.771857 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 11 10:22:10 crc 
kubenswrapper[4663]: I1211 10:22:10.944039 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz"
Dec 11 10:22:11 crc kubenswrapper[4663]: I1211 10:22:11.047308 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config"
Dec 11 10:22:11 crc kubenswrapper[4663]: I1211 10:22:11.092584 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c"
Dec 11 10:22:11 crc kubenswrapper[4663]: I1211 10:22:11.095839 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert"
Dec 11 10:22:11 crc kubenswrapper[4663]: I1211 10:22:11.178071 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Dec 11 10:22:11 crc kubenswrapper[4663]: I1211 10:22:11.237124 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5"
Dec 11 10:22:11 crc kubenswrapper[4663]: I1211 10:22:11.506251 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert"
Dec 11 10:22:11 crc kubenswrapper[4663]: I1211 10:22:11.695807 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Dec 11 10:22:11 crc kubenswrapper[4663]: I1211 10:22:11.747441 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq"
Dec 11 10:22:11 crc kubenswrapper[4663]: I1211 10:22:11.854678 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Dec 11 10:22:11 crc kubenswrapper[4663]: I1211 10:22:11.932030 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config"
Dec 11 10:22:12 crc kubenswrapper[4663]: I1211 10:22:12.090923 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert"
Dec 11 10:22:12 crc kubenswrapper[4663]: I1211 10:22:12.094153 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt"
Dec 11 10:22:12 crc kubenswrapper[4663]: I1211 10:22:12.096247 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx"
Dec 11 10:22:12 crc kubenswrapper[4663]: I1211 10:22:12.127425 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt"
Dec 11 10:22:12 crc kubenswrapper[4663]: I1211 10:22:12.179148 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle"
Dec 11 10:22:12 crc kubenswrapper[4663]: I1211 10:22:12.315322 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt"
Dec 11 10:22:12 crc kubenswrapper[4663]: I1211 10:22:12.338105 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt"
Dec 11 10:22:12 crc kubenswrapper[4663]: I1211 10:22:12.360019 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls"
Dec 11 10:22:12 crc kubenswrapper[4663]: I1211 10:22:12.367881 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib"
Dec 11 10:22:12 crc kubenswrapper[4663]: I1211 10:22:12.383826 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert"
Dec 11 10:22:12 crc kubenswrapper[4663]: I1211 10:22:12.447175 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config"
Dec 11 10:22:12 crc kubenswrapper[4663]: I1211 10:22:12.481068 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr"
Dec 11 10:22:12 crc kubenswrapper[4663]: I1211 10:22:12.537917 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1"
Dec 11 10:22:12 crc kubenswrapper[4663]: I1211 10:22:12.589467 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert"
Dec 11 10:22:12 crc kubenswrapper[4663]: I1211 10:22:12.608460 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Dec 11 10:22:12 crc kubenswrapper[4663]: I1211 10:22:12.644482 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7"
Dec 11 10:22:12 crc kubenswrapper[4663]: I1211 10:22:12.658863 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert"
Dec 11 10:22:12 crc kubenswrapper[4663]: I1211 10:22:12.703106 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff"
Dec 11 10:22:12 crc kubenswrapper[4663]: I1211 10:22:12.720057 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert"
Dec 11 10:22:12 crc kubenswrapper[4663]: I1211 10:22:12.794942 4663 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Dec 11 10:22:12 crc kubenswrapper[4663]: I1211 10:22:12.796038 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Dec 11 10:22:12 crc kubenswrapper[4663]: I1211 10:22:12.844388 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin"
Dec 11 10:22:12 crc kubenswrapper[4663]: I1211 10:22:12.880940 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client"
Dec 11 10:22:12 crc kubenswrapper[4663]: I1211 10:22:12.925044 4663 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Dec 11 10:22:12 crc kubenswrapper[4663]: I1211 10:22:12.993661 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config"
Dec 11 10:22:13 crc kubenswrapper[4663]: I1211 10:22:13.122713 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config"
Dec 11 10:22:13 crc kubenswrapper[4663]: I1211 10:22:13.165381 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert"
Dec 11 10:22:13 crc kubenswrapper[4663]: I1211 10:22:13.194894 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert"
Dec 11 10:22:13 crc kubenswrapper[4663]: I1211 10:22:13.228944 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt"
Dec 11 10:22:13 crc kubenswrapper[4663]: I1211 10:22:13.244136 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret"
Dec 11 10:22:13 crc kubenswrapper[4663]: I1211 10:22:13.468172 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle"
Dec 11 10:22:13 crc kubenswrapper[4663]: I1211 10:22:13.576050 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default"
Dec 11 10:22:13 crc kubenswrapper[4663]: I1211 10:22:13.578262 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt"
Dec 11 10:22:13 crc kubenswrapper[4663]: I1211 10:22:13.624719 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt"
Dec 11 10:22:13 crc kubenswrapper[4663]: I1211 10:22:13.695141 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates"
Dec 11 10:22:13 crc kubenswrapper[4663]: I1211 10:22:13.734157 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls"
Dec 11 10:22:13 crc kubenswrapper[4663]: I1211 10:22:13.744365 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt"
Dec 11 10:22:13 crc kubenswrapper[4663]: I1211 10:22:13.840500 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c"
Dec 11 10:22:13 crc kubenswrapper[4663]: I1211 10:22:13.884914 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config"
Dec 11 10:22:13 crc kubenswrapper[4663]: I1211 10:22:13.929252 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client"
Dec 11 10:22:14 crc kubenswrapper[4663]: I1211 10:22:14.019438 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert"
Dec 11 10:22:14 crc kubenswrapper[4663]: I1211 10:22:14.025739 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1"
Dec 11 10:22:14 crc kubenswrapper[4663]: I1211 10:22:14.098112 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt"
Dec 11 10:22:14 crc kubenswrapper[4663]: I1211 10:22:14.113789 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt"
Dec 11 10:22:14 crc kubenswrapper[4663]: I1211 10:22:14.127703 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key"
Dec 11 10:22:14 crc kubenswrapper[4663]: I1211 10:22:14.143240 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs"
Dec 11 10:22:14 crc kubenswrapper[4663]: I1211 10:22:14.212323 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle"
Dec 11 10:22:14 crc kubenswrapper[4663]: I1211 10:22:14.226454 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt"
Dec 11 10:22:14 crc kubenswrapper[4663]: I1211 10:22:14.230155 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls"
Dec 11 10:22:14 crc kubenswrapper[4663]: I1211 10:22:14.326489 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca"
Dec 11 10:22:14 crc kubenswrapper[4663]: I1211 10:22:14.329886 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt"
Dec 11 10:22:14 crc kubenswrapper[4663]: I1211 10:22:14.339247 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl"
Dec 11 10:22:14 crc kubenswrapper[4663]: I1211 10:22:14.343352 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Dec 11 10:22:14 crc kubenswrapper[4663]: I1211 10:22:14.463521 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist"
Dec 11 10:22:14 crc kubenswrapper[4663]: I1211 10:22:14.468753 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt"
Dec 11 10:22:14 crc kubenswrapper[4663]: I1211 10:22:14.536850 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf"
Dec 11 10:22:14 crc kubenswrapper[4663]: I1211 10:22:14.638638 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt"
Dec 11 10:22:14 crc kubenswrapper[4663]: I1211 10:22:14.640004 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt"
Dec 11 10:22:14 crc kubenswrapper[4663]: I1211 10:22:14.676579 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt"
Dec 11 10:22:14 crc kubenswrapper[4663]: I1211 10:22:14.725698 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token"
Dec 11 10:22:14 crc kubenswrapper[4663]: I1211 10:22:14.979898 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Dec 11 10:22:15 crc kubenswrapper[4663]: I1211 10:22:15.008980 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt"
Dec 11 10:22:15 crc kubenswrapper[4663]: I1211 10:22:15.033763 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls"
Dec 11 10:22:15 crc kubenswrapper[4663]: I1211 10:22:15.111909 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt"
Dec 11 10:22:15 crc kubenswrapper[4663]: I1211 10:22:15.185551 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection"
Dec 11 10:22:15 crc kubenswrapper[4663]: I1211 10:22:15.229072 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Dec 11 10:22:15 crc kubenswrapper[4663]: I1211 10:22:15.267679 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt"
Dec 11 10:22:15 crc kubenswrapper[4663]: I1211 10:22:15.282993 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt"
Dec 11 10:22:15 crc kubenswrapper[4663]: I1211 10:22:15.421137 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca"
Dec 11 10:22:15 crc kubenswrapper[4663]: I1211 10:22:15.614796 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template"
Dec 11 10:22:15 crc kubenswrapper[4663]: I1211 10:22:15.740077 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt"
Dec 11 10:22:15 crc kubenswrapper[4663]: I1211 10:22:15.805337 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt"
Dec 11 10:22:15 crc kubenswrapper[4663]: I1211 10:22:15.880237 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt"
Dec 11 10:22:15 crc kubenswrapper[4663]: I1211 10:22:15.880539 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt"
Dec 11 10:22:15 crc kubenswrapper[4663]: I1211 10:22:15.882668 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt"
Dec 11 10:22:15 crc kubenswrapper[4663]: I1211 10:22:15.911944 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert"
Dec 11 10:22:15 crc kubenswrapper[4663]: I1211 10:22:15.919315 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy"
Dec 11 10:22:15 crc kubenswrapper[4663]: I1211 10:22:15.919594 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert"
Dec 11 10:22:15 crc kubenswrapper[4663]: I1211 10:22:15.938217 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt"
Dec 11 10:22:15 crc kubenswrapper[4663]: I1211 10:22:15.958815 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert"
Dec 11 10:22:15 crc kubenswrapper[4663]: I1211 10:22:15.999565 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt"
Dec 11 10:22:16 crc kubenswrapper[4663]: I1211 10:22:16.046075 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm"
Dec 11 10:22:16 crc kubenswrapper[4663]: I1211 10:22:16.066867 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls"
Dec 11 10:22:16 crc kubenswrapper[4663]: I1211 10:22:16.111717 4663 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k"
Dec 11 10:22:16 crc kubenswrapper[4663]: I1211 10:22:16.162156 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt"
Dec 11 10:22:16 crc kubenswrapper[4663]: I1211 10:22:16.164992 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt"
Dec 11 10:22:16 crc kubenswrapper[4663]: I1211 10:22:16.184108 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d"
Dec 11 10:22:16 crc kubenswrapper[4663]: I1211 10:22:16.190449 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default"
Dec 11 10:22:16 crc kubenswrapper[4663]: I1211 10:22:16.198022 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg"
Dec 11 10:22:16 crc kubenswrapper[4663]: I1211 10:22:16.300327 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt"
Dec 11 10:22:16 crc kubenswrapper[4663]: I1211 10:22:16.404016 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt"
Dec 11 10:22:16 crc kubenswrapper[4663]: I1211 10:22:16.411350 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Dec 11 10:22:16 crc kubenswrapper[4663]: I1211 10:22:16.479324 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle"
Dec 11 10:22:16 crc kubenswrapper[4663]: I1211 10:22:16.583227 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config"
Dec 11 10:22:16 crc kubenswrapper[4663]: I1211 10:22:16.709126 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle"
Dec 11 10:22:16 crc kubenswrapper[4663]: I1211 10:22:16.742148 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt"
Dec 11 10:22:16 crc kubenswrapper[4663]: I1211 10:22:16.858148 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd"
Dec 11 10:22:17 crc kubenswrapper[4663]: I1211 10:22:17.040096 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w"
Dec 11 10:22:17 crc kubenswrapper[4663]: I1211 10:22:17.049483 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Dec 11 10:22:17 crc kubenswrapper[4663]: I1211 10:22:17.084374 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy"
Dec 11 10:22:17 crc kubenswrapper[4663]: I1211 10:22:17.091952 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt"
Dec 11 10:22:17 crc kubenswrapper[4663]: I1211 10:22:17.123917 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides"
Dec 11 10:22:17 crc kubenswrapper[4663]: I1211 10:22:17.143473 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert"
Dec 11 10:22:17 crc kubenswrapper[4663]: I1211 10:22:17.153923 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets"
Dec 11 10:22:17 crc kubenswrapper[4663]: I1211 10:22:17.193757 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Dec 11 10:22:17 crc kubenswrapper[4663]: I1211 10:22:17.207902 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig"
Dec 11 10:22:17 crc kubenswrapper[4663]: I1211 10:22:17.247167 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt"
Dec 11 10:22:17 crc kubenswrapper[4663]: I1211 10:22:17.299639 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt"
Dec 11 10:22:17 crc kubenswrapper[4663]: I1211 10:22:17.327443 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt"
Dec 11 10:22:17 crc kubenswrapper[4663]: I1211 10:22:17.475058 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides"
Dec 11 10:22:17 crc kubenswrapper[4663]: I1211 10:22:17.569077 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls"
Dec 11 10:22:17 crc kubenswrapper[4663]: I1211 10:22:17.606773 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert"
Dec 11 10:22:17 crc kubenswrapper[4663]: I1211 10:22:17.744675 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk"
Dec 11 10:22:17 crc kubenswrapper[4663]: I1211 10:22:17.779543 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config"
Dec 11 10:22:17 crc kubenswrapper[4663]: I1211 10:22:17.814088 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config"
Dec 11 10:22:18 crc kubenswrapper[4663]: I1211 10:22:18.025718 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt"
Dec 11 10:22:18 crc kubenswrapper[4663]: I1211 10:22:18.102312 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt"
Dec 11 10:22:18 crc kubenswrapper[4663]: I1211 10:22:18.284870 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt"
Dec 11 10:22:18 crc kubenswrapper[4663]: I1211 10:22:18.356108 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config"
Dec 11 10:22:18 crc kubenswrapper[4663]: I1211 10:22:18.458486 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv"
Dec 11 10:22:18 crc kubenswrapper[4663]: I1211 10:22:18.465243 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt"
Dec 11 10:22:18 crc kubenswrapper[4663]: I1211 10:22:18.492462 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert"
Dec 11 10:22:18 crc kubenswrapper[4663]: I1211 10:22:18.585610 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Dec 11 10:22:18 crc kubenswrapper[4663]: I1211 10:22:18.615774 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p"
Dec 11 10:22:18 crc kubenswrapper[4663]: I1211 10:22:18.645397 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt"
Dec 11 10:22:18 crc kubenswrapper[4663]: I1211 10:22:18.699465 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt"
Dec 11 10:22:18 crc kubenswrapper[4663]: I1211 10:22:18.703447 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl"
Dec 11 10:22:18 crc kubenswrapper[4663]: I1211 10:22:18.707148 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1"
Dec 11 10:22:18 crc kubenswrapper[4663]: I1211 10:22:18.725118 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit"
Dec 11 10:22:18 crc kubenswrapper[4663]: I1211 10:22:18.731036 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert"
Dec 11 10:22:18 crc kubenswrapper[4663]: I1211 10:22:18.751478 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt"
Dec 11 10:22:18 crc kubenswrapper[4663]: I1211 10:22:18.761044 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd"
Dec 11 10:22:18 crc kubenswrapper[4663]: I1211 10:22:18.788310 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt"
Dec 11 10:22:18 crc kubenswrapper[4663]: I1211 10:22:18.905480 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt"
Dec 11 10:22:18 crc kubenswrapper[4663]: I1211 10:22:18.937969 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt"
Dec 11 10:22:18 crc kubenswrapper[4663]: I1211 10:22:18.942875 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87"
Dec 11 10:22:18 crc kubenswrapper[4663]: I1211 10:22:18.944780 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert"
Dec 11 10:22:18 crc kubenswrapper[4663]: I1211 10:22:18.962470 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert"
Dec 11 10:22:19 crc kubenswrapper[4663]: I1211 10:22:19.046238 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert"
Dec 11 10:22:19 crc kubenswrapper[4663]: I1211 10:22:19.093134 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca"
Dec 11 10:22:19 crc kubenswrapper[4663]: I1211 10:22:19.263971 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session"
Dec 11 10:22:19 crc kubenswrapper[4663]: I1211 10:22:19.297292 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt"
Dec 11 10:22:19 crc kubenswrapper[4663]: I1211 10:22:19.355776 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config"
Dec 11 10:22:19 crc kubenswrapper[4663]: I1211 10:22:19.372053 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret"
Dec 11 10:22:19 crc kubenswrapper[4663]: I1211 10:22:19.385690 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls"
Dec 11 10:22:19 crc kubenswrapper[4663]: I1211 10:22:19.402547 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert"
Dec 11 10:22:19 crc kubenswrapper[4663]: I1211 10:22:19.422092 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86"
Dec 11 10:22:19 crc kubenswrapper[4663]: I1211 10:22:19.445061 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls"
Dec 11 10:22:19 crc kubenswrapper[4663]: I1211 10:22:19.575992 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt"
Dec 11 10:22:19 crc kubenswrapper[4663]: I1211 10:22:19.582832 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Dec 11 10:22:19 crc kubenswrapper[4663]: I1211 10:22:19.587261 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt"
Dec 11 10:22:19 crc kubenswrapper[4663]: I1211 10:22:19.736675 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images"
Dec 11 10:22:19 crc kubenswrapper[4663]: I1211 10:22:19.961259 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls"
Dec 11 10:22:19 crc kubenswrapper[4663]: I1211 10:22:19.987294 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk"
Dec 11 10:22:20 crc kubenswrapper[4663]: I1211 10:22:20.004837 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7"
Dec 11 10:22:20 crc kubenswrapper[4663]: I1211 10:22:20.022609 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt"
Dec 11 10:22:20 crc kubenswrapper[4663]: I1211 10:22:20.111263 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt"
Dec 11 10:22:20 crc kubenswrapper[4663]: I1211 10:22:20.215085 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx"
Dec 11 10:22:20 crc kubenswrapper[4663]: I1211 10:22:20.266717 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script"
Dec 11 10:22:20 crc kubenswrapper[4663]: I1211 10:22:20.306709 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle"
Dec 11 10:22:20 crc kubenswrapper[4663]: I1211 10:22:20.369812 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt"
Dec 11 10:22:20 crc kubenswrapper[4663]: I1211 10:22:20.437318 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx"
Dec 11 10:22:20 crc kubenswrapper[4663]: I1211 10:22:20.475595 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls"
Dec 11 10:22:20 crc kubenswrapper[4663]: I1211 10:22:20.494800 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca"
Dec 11 10:22:20 crc kubenswrapper[4663]: I1211 10:22:20.512418 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt"
Dec 11 10:22:20 crc kubenswrapper[4663]: I1211 10:22:20.516702 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert"
Dec 11 10:22:20 crc kubenswrapper[4663]: I1211 10:22:20.525616 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default"
Dec 11 10:22:20 crc kubenswrapper[4663]: I1211 10:22:20.554935 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1"
Dec 11 10:22:20 crc kubenswrapper[4663]: I1211 10:22:20.593737 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt"
Dec 11 10:22:20 crc kubenswrapper[4663]: I1211 10:22:20.643273 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy"
Dec 11 10:22:20 crc kubenswrapper[4663]: I1211 10:22:20.684753 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6"
Dec 11 10:22:20 crc kubenswrapper[4663]: I1211 10:22:20.730922 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca"
Dec 11 10:22:20 crc kubenswrapper[4663]: I1211 10:22:20.753845 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd"
Dec 11 10:22:20 crc kubenswrapper[4663]: I1211 10:22:20.839823 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error"
Dec 11 10:22:20 crc kubenswrapper[4663]: I1211 10:22:20.843219 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls"
Dec 11 10:22:20 crc kubenswrapper[4663]: I1211 10:22:20.963045 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Dec 11 10:22:21 crc kubenswrapper[4663]: I1211 10:22:21.113943 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw"
Dec 11 10:22:21 crc kubenswrapper[4663]: I1211 10:22:21.375512 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt"
Dec 11 10:22:21 crc kubenswrapper[4663]: I1211 10:22:21.457201 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt"
Dec 11 10:22:21 crc kubenswrapper[4663]: I1211 10:22:21.544353 4663 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160
Dec 11 10:22:21 crc kubenswrapper[4663]: I1211 10:22:21.598575 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt"
Dec 11 10:22:21 crc kubenswrapper[4663]: I1211 10:22:21.626901 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4"
Dec 11 10:22:21 crc kubenswrapper[4663]: I1211 10:22:21.685556 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr"
Dec 11 10:22:21 crc kubenswrapper[4663]: I1211 10:22:21.697084 4663 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Dec 11 10:22:21 crc kubenswrapper[4663]: I1211 10:22:21.758673 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg"
Dec 11 10:22:21 crc kubenswrapper[4663]: I1211 10:22:21.812706 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca"
Dec 11 10:22:21 crc kubenswrapper[4663]: I1211 10:22:21.843653 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq"
Dec 11 10:22:21 crc kubenswrapper[4663]: I1211 10:22:21.885603 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt"
Dec 11 10:22:21 crc kubenswrapper[4663]: I1211 10:22:21.974569 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls"
Dec 11 10:22:22 crc kubenswrapper[4663]: I1211 10:22:22.001825 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert"
Dec 11 10:22:22 crc kubenswrapper[4663]: I1211 10:22:22.069414 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt"
Dec 11 10:22:22 crc kubenswrapper[4663]: I1211 10:22:22.119698 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle"
Dec 11 10:22:22 crc kubenswrapper[4663]: I1211 10:22:22.121212 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4"
Dec 11 10:22:22 crc kubenswrapper[4663]: I1211 10:22:22.122997 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert"
Dec 11 10:22:22 crc kubenswrapper[4663]: I1211 10:22:22.258085 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt"
Dec 11 10:22:22 crc kubenswrapper[4663]: I1211 10:22:22.327885 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Dec 11 10:22:22 crc kubenswrapper[4663]: I1211 10:22:22.370277 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources"
Dec 11 10:22:22 crc kubenswrapper[4663]: I1211 10:22:22.475941 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt"
Dec 11 10:22:22 crc kubenswrapper[4663]: I1211 10:22:22.592117 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config"
Dec 11 10:22:22 crc kubenswrapper[4663]: I1211 10:22:22.609552 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh"
Dec 11 10:22:22 crc kubenswrapper[4663]: I1211 10:22:22.624625 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca"
Dec 11 10:22:22 crc kubenswrapper[4663]: I1211 10:22:22.758076 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt"
Dec 11 10:22:22 crc kubenswrapper[4663]: I1211 10:22:22.777117 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Dec 11 10:22:22 crc kubenswrapper[4663]: I1211 10:22:22.806787 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls"
Dec 11 10:22:22 crc kubenswrapper[4663]: I1211 10:22:22.825552 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z"
Dec 11 10:22:22 crc kubenswrapper[4663]: I1211 10:22:22.909443 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r"
Dec 11 10:22:22 crc kubenswrapper[4663]: I1211 10:22:22.932138 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca"
Dec 11 10:22:22 crc kubenswrapper[4663]: I1211 10:22:22.936965 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default"
Dec 11 10:22:23 crc kubenswrapper[4663]: I1211 10:22:23.207696 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj"
Dec 11 10:22:23 crc kubenswrapper[4663]: I1211 10:22:23.244719 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config"
Dec 11 10:22:23 crc kubenswrapper[4663]: I1211 10:22:23.455470 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca"
Dec 11 10:22:23 crc kubenswrapper[4663]: I1211 10:22:23.844673 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt"
Dec 11 10:22:23 crc kubenswrapper[4663]: I1211 10:22:23.939731 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login"
Dec 11 10:22:23 crc kubenswrapper[4663]: I1211 10:22:23.962705 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config"
Dec 11 10:22:24 crc kubenswrapper[4663]: I1211 10:22:24.211878 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images"
Dec 11 10:22:24 crc kubenswrapper[4663]: I1211 10:22:24.365059 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle"
Dec 11 10:22:25 crc kubenswrapper[4663]: I1211 10:22:25.189700 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx"
Dec 11 10:22:25 crc kubenswrapper[4663]: I1211 10:22:25.193466 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle"
Dec 11 10:22:26 crc kubenswrapper[4663]: I1211 10:22:26.044257 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Dec 11 10:22:40 crc kubenswrapper[4663]: I1211 10:22:40.464312 4663 generic.go:334] "Generic (PLEG): container finished" podID="05009b2f-f136-4f9d-8d58-169816bb1ea6" containerID="e4fff5a3f5be54137cdcf6a3002394c1c909cd92de49770bfc4f3ba925121246" exitCode=0
Dec 11 10:22:40 crc kubenswrapper[4663]: I1211 10:22:40.464390 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-24p95" event={"ID":"05009b2f-f136-4f9d-8d58-169816bb1ea6","Type":"ContainerDied","Data":"e4fff5a3f5be54137cdcf6a3002394c1c909cd92de49770bfc4f3ba925121246"}
Dec 11 10:22:40 crc kubenswrapper[4663]: I1211 10:22:40.465445 4663 scope.go:117] "RemoveContainer" containerID="e4fff5a3f5be54137cdcf6a3002394c1c909cd92de49770bfc4f3ba925121246"
Dec 11 10:22:40 crc kubenswrapper[4663]: I1211 10:22:40.860264 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt"
Dec 11 10:22:41 crc kubenswrapper[4663]: I1211 10:22:41.030063 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls"
Dec 11 10:22:41 crc kubenswrapper[4663]: I1211 10:22:41.472910 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-24p95_05009b2f-f136-4f9d-8d58-169816bb1ea6/marketplace-operator/1.log"
Dec 11 10:22:41 crc kubenswrapper[4663]: I1211 10:22:41.473292 4663 generic.go:334] "Generic (PLEG): container finished" podID="05009b2f-f136-4f9d-8d58-169816bb1ea6" containerID="03b032c66008c4ebcbd02dd2cfe1f01f8d51449c094f7df1ad754fbb1d0eff81" exitCode=1
Dec 11 10:22:41 crc kubenswrapper[4663]: I1211 10:22:41.473330 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-24p95" event={"ID":"05009b2f-f136-4f9d-8d58-169816bb1ea6","Type":"ContainerDied","Data":"03b032c66008c4ebcbd02dd2cfe1f01f8d51449c094f7df1ad754fbb1d0eff81"}
Dec 11 10:22:41 crc kubenswrapper[4663]: I1211 10:22:41.473367 4663 scope.go:117] "RemoveContainer" containerID="e4fff5a3f5be54137cdcf6a3002394c1c909cd92de49770bfc4f3ba925121246"
Dec 11 10:22:41 crc kubenswrapper[4663]: I1211 10:22:41.473772 4663 scope.go:117] "RemoveContainer" containerID="03b032c66008c4ebcbd02dd2cfe1f01f8d51449c094f7df1ad754fbb1d0eff81"
Dec 11 10:22:41 crc kubenswrapper[4663]: E1211 10:22:41.474088 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"marketplace-operator\" with CrashLoopBackOff: \"back-off 10s restarting failed container=marketplace-operator pod=marketplace-operator-79b997595-24p95_openshift-marketplace(05009b2f-f136-4f9d-8d58-169816bb1ea6)\"" pod="openshift-marketplace/marketplace-operator-79b997595-24p95" podUID="05009b2f-f136-4f9d-8d58-169816bb1ea6"
Dec 11 10:22:42 crc kubenswrapper[4663]: I1211 10:22:42.479082 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-24p95_05009b2f-f136-4f9d-8d58-169816bb1ea6/marketplace-operator/1.log"
Dec 11 10:22:43 crc kubenswrapper[4663]: I1211 10:22:43.756087 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt"
Dec 11 10:22:44 crc kubenswrapper[4663]: I1211 10:22:44.404056 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data"
Dec 11 10:22:45 crc kubenswrapper[4663]: I1211 10:22:45.321845 4663 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-marketplace/marketplace-operator-79b997595-24p95"
Dec 11 10:22:45 crc kubenswrapper[4663]: I1211 10:22:45.321899 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-24p95"
Dec 11 10:22:45 crc kubenswrapper[4663]: I1211 10:22:45.322289 4663 scope.go:117] "RemoveContainer" containerID="03b032c66008c4ebcbd02dd2cfe1f01f8d51449c094f7df1ad754fbb1d0eff81"
Dec 11 10:22:45 crc kubenswrapper[4663]: E1211 10:22:45.322481 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"marketplace-operator\" with CrashLoopBackOff: \"back-off 10s restarting failed container=marketplace-operator pod=marketplace-operator-79b997595-24p95_openshift-marketplace(05009b2f-f136-4f9d-8d58-169816bb1ea6)\"" pod="openshift-marketplace/marketplace-operator-79b997595-24p95" podUID="05009b2f-f136-4f9d-8d58-169816bb1ea6"
Dec 11 10:22:48 crc kubenswrapper[4663]: I1211 10:22:48.430547 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt"
Dec 11 10:22:49 crc kubenswrapper[4663]: I1211 10:22:49.390093 4663 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials
Dec 11 10:22:50 crc kubenswrapper[4663]: I1211 10:22:50.291807 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle"
Dec 11 10:22:52 crc kubenswrapper[4663]: I1211 10:22:52.390171 4663 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Dec 11 10:22:52 crc kubenswrapper[4663]: I1211 10:22:52.392393 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podStartSLOduration=76.392371168 podStartE2EDuration="1m16.392371168s" podCreationTimestamp="2025-12-11 10:21:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:21:58.586330976 +0000 UTC m=+249.404260759" watchObservedRunningTime="2025-12-11 10:22:52.392371168 +0000 UTC m=+303.210300952"
Dec 11 10:22:52 crc kubenswrapper[4663]: I1211 10:22:52.393876 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"]
Dec 11 10:22:52 crc kubenswrapper[4663]: I1211 10:22:52.393923 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"]
Dec 11 10:22:52 crc kubenswrapper[4663]: I1211 10:22:52.397756 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 11 10:22:52 crc kubenswrapper[4663]: I1211 10:22:52.412841 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=54.412821032 podStartE2EDuration="54.412821032s" podCreationTimestamp="2025-12-11 10:21:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:22:52.409380222 +0000 UTC m=+303.227310006" watchObservedRunningTime="2025-12-11 10:22:52.412821032 +0000 UTC m=+303.230750806"
Dec 11 10:22:53 crc kubenswrapper[4663]: I1211 10:22:53.897165 4663 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Dec 11 10:22:53 crc kubenswrapper[4663]: I1211 10:22:53.897740 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://c7885cbb424652f796c5a1b39ca0729218de783212443ea7aaf706edfb023f6d" gracePeriod=5
Dec 11 10:22:57 crc kubenswrapper[4663]: I1211 10:22:57.506446 4663 scope.go:117] "RemoveContainer" containerID="03b032c66008c4ebcbd02dd2cfe1f01f8d51449c094f7df1ad754fbb1d0eff81"
Dec 11 10:22:58 crc kubenswrapper[4663]: I1211 10:22:58.563311 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-24p95_05009b2f-f136-4f9d-8d58-169816bb1ea6/marketplace-operator/1.log"
Dec 11 10:22:58 crc kubenswrapper[4663]: I1211 10:22:58.563362 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-24p95" event={"ID":"05009b2f-f136-4f9d-8d58-169816bb1ea6","Type":"ContainerStarted","Data":"fb5df3e012f798dbcac14ae5a4897657eee0a8fb7864955e9bb8288f10b80102"}
Dec 11 10:22:58 crc kubenswrapper[4663]: I1211 10:22:58.564318 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-24p95"
Dec 11 10:22:58 crc kubenswrapper[4663]: I1211 10:22:58.566797 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-24p95"
Dec 11 10:22:59 crc kubenswrapper[4663]: I1211 10:22:59.449167 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log"
Dec 11 10:22:59 crc kubenswrapper[4663]: I1211 10:22:59.449343 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 11 10:22:59 crc kubenswrapper[4663]: I1211 10:22:59.511051 4663 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID=""
Dec 11 10:22:59 crc kubenswrapper[4663]: I1211 10:22:59.518524 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Dec 11 10:22:59 crc kubenswrapper[4663]: I1211 10:22:59.518550 4663 kubelet.go:2649] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="5c4ae339-06ca-495c-bcf5-3d95ee92c526"
Dec 11 10:22:59 crc kubenswrapper[4663]: I1211 10:22:59.521284 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Dec 11 10:22:59 crc kubenswrapper[4663]: I1211 10:22:59.521309 4663 kubelet.go:2673] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="5c4ae339-06ca-495c-bcf5-3d95ee92c526"
Dec 11 10:22:59 crc kubenswrapper[4663]: I1211 10:22:59.568782 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log"
Dec 11 10:22:59 crc kubenswrapper[4663]: I1211 10:22:59.568826 4663 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="c7885cbb424652f796c5a1b39ca0729218de783212443ea7aaf706edfb023f6d" exitCode=137
Dec 11 10:22:59 crc kubenswrapper[4663]: I1211 10:22:59.569311 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 11 10:22:59 crc kubenswrapper[4663]: I1211 10:22:59.569588 4663 scope.go:117] "RemoveContainer" containerID="c7885cbb424652f796c5a1b39ca0729218de783212443ea7aaf706edfb023f6d"
Dec 11 10:22:59 crc kubenswrapper[4663]: I1211 10:22:59.572785 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Dec 11 10:22:59 crc kubenswrapper[4663]: I1211 10:22:59.572825 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Dec 11 10:22:59 crc kubenswrapper[4663]: I1211 10:22:59.572884 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Dec 11 10:22:59 crc kubenswrapper[4663]: I1211 10:22:59.572932 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Dec 11 10:22:59 crc kubenswrapper[4663]: I1211 10:22:59.572879 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 11 10:22:59 crc kubenswrapper[4663]: I1211 10:22:59.572985 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Dec 11 10:22:59 crc kubenswrapper[4663]: I1211 10:22:59.573001 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 11 10:22:59 crc kubenswrapper[4663]: I1211 10:22:59.573020 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 11 10:22:59 crc kubenswrapper[4663]: I1211 10:22:59.573147 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 11 10:22:59 crc kubenswrapper[4663]: I1211 10:22:59.573261 4663 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\""
Dec 11 10:22:59 crc kubenswrapper[4663]: I1211 10:22:59.573281 4663 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\""
Dec 11 10:22:59 crc kubenswrapper[4663]: I1211 10:22:59.573347 4663 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\""
Dec 11 10:22:59 crc kubenswrapper[4663]: I1211 10:22:59.573360 4663 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\""
Dec 11 10:22:59 crc kubenswrapper[4663]: I1211 10:22:59.579744 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 11 10:22:59 crc kubenswrapper[4663]: I1211 10:22:59.582806 4663 scope.go:117] "RemoveContainer" containerID="c7885cbb424652f796c5a1b39ca0729218de783212443ea7aaf706edfb023f6d"
Dec 11 10:22:59 crc kubenswrapper[4663]: E1211 10:22:59.583083 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c7885cbb424652f796c5a1b39ca0729218de783212443ea7aaf706edfb023f6d\": container with ID starting with c7885cbb424652f796c5a1b39ca0729218de783212443ea7aaf706edfb023f6d not found: ID does not exist" containerID="c7885cbb424652f796c5a1b39ca0729218de783212443ea7aaf706edfb023f6d"
Dec 11 10:22:59 crc kubenswrapper[4663]: I1211 10:22:59.583112 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c7885cbb424652f796c5a1b39ca0729218de783212443ea7aaf706edfb023f6d"} err="failed to get container status \"c7885cbb424652f796c5a1b39ca0729218de783212443ea7aaf706edfb023f6d\": rpc error: code = NotFound desc = could not find container \"c7885cbb424652f796c5a1b39ca0729218de783212443ea7aaf706edfb023f6d\": container with ID starting with c7885cbb424652f796c5a1b39ca0729218de783212443ea7aaf706edfb023f6d not found: ID does not exist"
Dec 11 10:22:59 crc kubenswrapper[4663]: I1211 10:22:59.674027 4663 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\""
Dec 11 10:23:01 crc kubenswrapper[4663]: I1211 10:23:01.509710 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes"
Dec 11 10:23:02 crc kubenswrapper[4663]: I1211 10:23:02.945956 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-p4fsq"]
Dec 11 10:23:02 crc kubenswrapper[4663]: I1211 10:23:02.946495 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-p4fsq" podUID="8f822dbf-f355-45f9-b293-f1ab29619ed7" containerName="registry-server" containerID="cri-o://50d8e84816e980eb349665cafcd9336f6e65d956f914b2470e8e0d56b4b2f585" gracePeriod=30
Dec 11 10:23:02 crc kubenswrapper[4663]: I1211 10:23:02.952148 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-mlbtj"]
Dec 11 10:23:02 crc kubenswrapper[4663]: I1211 10:23:02.952358 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-mlbtj" podUID="3b63171f-dadd-465c-98c6-0197a59336a9" containerName="registry-server" containerID="cri-o://674008b10c25f6ec7be1f81178a5bbb9a30dfb2c960f3943abbecedd370035f7" gracePeriod=30
Dec 11 10:23:02 crc kubenswrapper[4663]: I1211 10:23:02.958014 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-24p95"]
Dec 11 10:23:02 crc kubenswrapper[4663]: I1211 10:23:02.958244 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-24p95" podUID="05009b2f-f136-4f9d-8d58-169816bb1ea6" containerName="marketplace-operator" containerID="cri-o://fb5df3e012f798dbcac14ae5a4897657eee0a8fb7864955e9bb8288f10b80102" gracePeriod=30
Dec 11 10:23:02 crc kubenswrapper[4663]: I1211 10:23:02.963846 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-cblmm"]
Dec 11 10:23:02 crc kubenswrapper[4663]: I1211 10:23:02.965015 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-cblmm" podUID="b4abc7b2-958e-41fb-9bae-b8dd427179e2" containerName="registry-server" containerID="cri-o://d1214d98544cf6be7af6172ab70ca47b50dc60e1dac2b372499e6ec622c9524f" gracePeriod=30
Dec 11 10:23:02 crc kubenswrapper[4663]: I1211 10:23:02.969009 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zwkwc"]
Dec 11 10:23:02 crc kubenswrapper[4663]: I1211 10:23:02.969231 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-zwkwc" podUID="2d163437-adb7-4c91-b925-72de9fe71120" containerName="registry-server" containerID="cri-o://fbe76f45b74391fcad78a171da9c6b358c0f3d07deba00d2cc7729fbc917138b" gracePeriod=30
Dec 11 10:23:02 crc kubenswrapper[4663]: I1211 10:23:02.979341 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-r4j8w"]
Dec 11 10:23:02 crc kubenswrapper[4663]: E1211 10:23:02.979551 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d500c17b-98ef-4a99-af87-a9cc9db60f04" containerName="installer"
Dec 11 10:23:02 crc kubenswrapper[4663]: I1211 10:23:02.979569 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="d500c17b-98ef-4a99-af87-a9cc9db60f04" containerName="installer"
Dec 11 10:23:02 crc kubenswrapper[4663]: E1211 10:23:02.979593 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Dec 11 10:23:02 crc kubenswrapper[4663]: I1211 10:23:02.979600 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Dec 11 10:23:02 crc kubenswrapper[4663]: I1211 10:23:02.979703 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="d500c17b-98ef-4a99-af87-a9cc9db60f04" containerName="installer"
Dec 11 10:23:02 crc kubenswrapper[4663]: I1211 10:23:02.979715 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Dec 11 10:23:02 crc kubenswrapper[4663]: I1211 10:23:02.980079 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-r4j8w"
Dec 11 10:23:02 crc kubenswrapper[4663]: I1211 10:23:02.992401 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-r4j8w"]
Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.113098 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dgtjs\" (UniqueName: \"kubernetes.io/projected/a6af4642-6ff0-46ed-9ece-b05780059f40-kube-api-access-dgtjs\") pod \"marketplace-operator-79b997595-r4j8w\" (UID: \"a6af4642-6ff0-46ed-9ece-b05780059f40\") " pod="openshift-marketplace/marketplace-operator-79b997595-r4j8w"
Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.113152 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/a6af4642-6ff0-46ed-9ece-b05780059f40-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-r4j8w\" (UID: \"a6af4642-6ff0-46ed-9ece-b05780059f40\") " pod="openshift-marketplace/marketplace-operator-79b997595-r4j8w"
Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.113212 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a6af4642-6ff0-46ed-9ece-b05780059f40-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-r4j8w\" (UID: \"a6af4642-6ff0-46ed-9ece-b05780059f40\") " pod="openshift-marketplace/marketplace-operator-79b997595-r4j8w"
Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.213839 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a6af4642-6ff0-46ed-9ece-b05780059f40-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-r4j8w\" (UID: \"a6af4642-6ff0-46ed-9ece-b05780059f40\") " pod="openshift-marketplace/marketplace-operator-79b997595-r4j8w"
Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.213940 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dgtjs\" (UniqueName: \"kubernetes.io/projected/a6af4642-6ff0-46ed-9ece-b05780059f40-kube-api-access-dgtjs\") pod \"marketplace-operator-79b997595-r4j8w\" (UID: \"a6af4642-6ff0-46ed-9ece-b05780059f40\") " pod="openshift-marketplace/marketplace-operator-79b997595-r4j8w"
Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.213964 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/a6af4642-6ff0-46ed-9ece-b05780059f40-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-r4j8w\" (UID: \"a6af4642-6ff0-46ed-9ece-b05780059f40\") " pod="openshift-marketplace/marketplace-operator-79b997595-r4j8w"
Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.215371 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a6af4642-6ff0-46ed-9ece-b05780059f40-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-r4j8w\" (UID: \"a6af4642-6ff0-46ed-9ece-b05780059f40\") " pod="openshift-marketplace/marketplace-operator-79b997595-r4j8w"
Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.218637 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/a6af4642-6ff0-46ed-9ece-b05780059f40-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-r4j8w\" (UID: \"a6af4642-6ff0-46ed-9ece-b05780059f40\") " pod="openshift-marketplace/marketplace-operator-79b997595-r4j8w"
Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.227953 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dgtjs\" (UniqueName: \"kubernetes.io/projected/a6af4642-6ff0-46ed-9ece-b05780059f40-kube-api-access-dgtjs\") pod \"marketplace-operator-79b997595-r4j8w\" (UID: \"a6af4642-6ff0-46ed-9ece-b05780059f40\") " pod="openshift-marketplace/marketplace-operator-79b997595-r4j8w"
Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.292494 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-r4j8w"
Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.367405 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-p4fsq"
Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.369126 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cblmm"
Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.374348 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-24p95_05009b2f-f136-4f9d-8d58-169816bb1ea6/marketplace-operator/1.log"
Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.374411 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-24p95"
Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.380058 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zwkwc"
Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.388798 4663 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-mlbtj" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.440794 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-r4j8w"] Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.516590 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4rjd\" (UniqueName: \"kubernetes.io/projected/8f822dbf-f355-45f9-b293-f1ab29619ed7-kube-api-access-w4rjd\") pod \"8f822dbf-f355-45f9-b293-f1ab29619ed7\" (UID: \"8f822dbf-f355-45f9-b293-f1ab29619ed7\") " Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.516624 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b63171f-dadd-465c-98c6-0197a59336a9-catalog-content\") pod \"3b63171f-dadd-465c-98c6-0197a59336a9\" (UID: \"3b63171f-dadd-465c-98c6-0197a59336a9\") " Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.516644 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jwbdm\" (UniqueName: \"kubernetes.io/projected/05009b2f-f136-4f9d-8d58-169816bb1ea6-kube-api-access-jwbdm\") pod \"05009b2f-f136-4f9d-8d58-169816bb1ea6\" (UID: \"05009b2f-f136-4f9d-8d58-169816bb1ea6\") " Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.516663 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/05009b2f-f136-4f9d-8d58-169816bb1ea6-marketplace-operator-metrics\") pod \"05009b2f-f136-4f9d-8d58-169816bb1ea6\" (UID: \"05009b2f-f136-4f9d-8d58-169816bb1ea6\") " Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.516696 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b63171f-dadd-465c-98c6-0197a59336a9-utilities\") pod \"3b63171f-dadd-465c-98c6-0197a59336a9\" (UID: \"3b63171f-dadd-465c-98c6-0197a59336a9\") " Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.516724 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8f822dbf-f355-45f9-b293-f1ab29619ed7-catalog-content\") pod \"8f822dbf-f355-45f9-b293-f1ab29619ed7\" (UID: \"8f822dbf-f355-45f9-b293-f1ab29619ed7\") " Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.516742 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2d163437-adb7-4c91-b925-72de9fe71120-catalog-content\") pod \"2d163437-adb7-4c91-b925-72de9fe71120\" (UID: \"2d163437-adb7-4c91-b925-72de9fe71120\") " Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.518478 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3b63171f-dadd-465c-98c6-0197a59336a9-utilities" (OuterVolumeSpecName: "utilities") pod "3b63171f-dadd-465c-98c6-0197a59336a9" (UID: "3b63171f-dadd-465c-98c6-0197a59336a9"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.518762 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4abc7b2-958e-41fb-9bae-b8dd427179e2-catalog-content\") pod \"b4abc7b2-958e-41fb-9bae-b8dd427179e2\" (UID: \"b4abc7b2-958e-41fb-9bae-b8dd427179e2\") " Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.518834 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ds4bk\" (UniqueName: \"kubernetes.io/projected/2d163437-adb7-4c91-b925-72de9fe71120-kube-api-access-ds4bk\") pod \"2d163437-adb7-4c91-b925-72de9fe71120\" (UID: \"2d163437-adb7-4c91-b925-72de9fe71120\") " Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.518861 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/05009b2f-f136-4f9d-8d58-169816bb1ea6-marketplace-trusted-ca\") pod \"05009b2f-f136-4f9d-8d58-169816bb1ea6\" (UID: \"05009b2f-f136-4f9d-8d58-169816bb1ea6\") " Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.518897 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2d163437-adb7-4c91-b925-72de9fe71120-utilities\") pod \"2d163437-adb7-4c91-b925-72de9fe71120\" (UID: \"2d163437-adb7-4c91-b925-72de9fe71120\") " Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.518910 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-974kp\" (UniqueName: \"kubernetes.io/projected/3b63171f-dadd-465c-98c6-0197a59336a9-kube-api-access-974kp\") pod \"3b63171f-dadd-465c-98c6-0197a59336a9\" (UID: \"3b63171f-dadd-465c-98c6-0197a59336a9\") " Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.519411 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/05009b2f-f136-4f9d-8d58-169816bb1ea6-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "05009b2f-f136-4f9d-8d58-169816bb1ea6" (UID: "05009b2f-f136-4f9d-8d58-169816bb1ea6"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.519828 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2d163437-adb7-4c91-b925-72de9fe71120-utilities" (OuterVolumeSpecName: "utilities") pod "2d163437-adb7-4c91-b925-72de9fe71120" (UID: "2d163437-adb7-4c91-b925-72de9fe71120"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.520142 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/05009b2f-f136-4f9d-8d58-169816bb1ea6-kube-api-access-jwbdm" (OuterVolumeSpecName: "kube-api-access-jwbdm") pod "05009b2f-f136-4f9d-8d58-169816bb1ea6" (UID: "05009b2f-f136-4f9d-8d58-169816bb1ea6"). InnerVolumeSpecName "kube-api-access-jwbdm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.520482 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f822dbf-f355-45f9-b293-f1ab29619ed7-kube-api-access-w4rjd" (OuterVolumeSpecName: "kube-api-access-w4rjd") pod "8f822dbf-f355-45f9-b293-f1ab29619ed7" (UID: "8f822dbf-f355-45f9-b293-f1ab29619ed7"). InnerVolumeSpecName "kube-api-access-w4rjd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.520728 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05009b2f-f136-4f9d-8d58-169816bb1ea6-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "05009b2f-f136-4f9d-8d58-169816bb1ea6" (UID: "05009b2f-f136-4f9d-8d58-169816bb1ea6"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.521904 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4abc7b2-958e-41fb-9bae-b8dd427179e2-utilities\") pod \"b4abc7b2-958e-41fb-9bae-b8dd427179e2\" (UID: \"b4abc7b2-958e-41fb-9bae-b8dd427179e2\") " Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.522155 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8f822dbf-f355-45f9-b293-f1ab29619ed7-utilities\") pod \"8f822dbf-f355-45f9-b293-f1ab29619ed7\" (UID: \"8f822dbf-f355-45f9-b293-f1ab29619ed7\") " Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.522206 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xpccm\" (UniqueName: \"kubernetes.io/projected/b4abc7b2-958e-41fb-9bae-b8dd427179e2-kube-api-access-xpccm\") pod \"b4abc7b2-958e-41fb-9bae-b8dd427179e2\" (UID: \"b4abc7b2-958e-41fb-9bae-b8dd427179e2\") " Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.522444 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4rjd\" (UniqueName: \"kubernetes.io/projected/8f822dbf-f355-45f9-b293-f1ab29619ed7-kube-api-access-w4rjd\") on node \"crc\" DevicePath \"\"" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.522456 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jwbdm\" (UniqueName: \"kubernetes.io/projected/05009b2f-f136-4f9d-8d58-169816bb1ea6-kube-api-access-jwbdm\") on node \"crc\" DevicePath \"\"" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.522465 4663 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/05009b2f-f136-4f9d-8d58-169816bb1ea6-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.522474 4663 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b63171f-dadd-465c-98c6-0197a59336a9-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.522481 4663 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/05009b2f-f136-4f9d-8d58-169816bb1ea6-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.522488 4663 reconciler_common.go:293] "Volume 
detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2d163437-adb7-4c91-b925-72de9fe71120-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.522755 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f822dbf-f355-45f9-b293-f1ab29619ed7-utilities" (OuterVolumeSpecName: "utilities") pod "8f822dbf-f355-45f9-b293-f1ab29619ed7" (UID: "8f822dbf-f355-45f9-b293-f1ab29619ed7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.523206 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b4abc7b2-958e-41fb-9bae-b8dd427179e2-utilities" (OuterVolumeSpecName: "utilities") pod "b4abc7b2-958e-41fb-9bae-b8dd427179e2" (UID: "b4abc7b2-958e-41fb-9bae-b8dd427179e2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.524844 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d163437-adb7-4c91-b925-72de9fe71120-kube-api-access-ds4bk" (OuterVolumeSpecName: "kube-api-access-ds4bk") pod "2d163437-adb7-4c91-b925-72de9fe71120" (UID: "2d163437-adb7-4c91-b925-72de9fe71120"). InnerVolumeSpecName "kube-api-access-ds4bk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.525815 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b63171f-dadd-465c-98c6-0197a59336a9-kube-api-access-974kp" (OuterVolumeSpecName: "kube-api-access-974kp") pod "3b63171f-dadd-465c-98c6-0197a59336a9" (UID: "3b63171f-dadd-465c-98c6-0197a59336a9"). InnerVolumeSpecName "kube-api-access-974kp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.526931 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b4abc7b2-958e-41fb-9bae-b8dd427179e2-kube-api-access-xpccm" (OuterVolumeSpecName: "kube-api-access-xpccm") pod "b4abc7b2-958e-41fb-9bae-b8dd427179e2" (UID: "b4abc7b2-958e-41fb-9bae-b8dd427179e2"). InnerVolumeSpecName "kube-api-access-xpccm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.542409 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b4abc7b2-958e-41fb-9bae-b8dd427179e2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b4abc7b2-958e-41fb-9bae-b8dd427179e2" (UID: "b4abc7b2-958e-41fb-9bae-b8dd427179e2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.573419 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3b63171f-dadd-465c-98c6-0197a59336a9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3b63171f-dadd-465c-98c6-0197a59336a9" (UID: "3b63171f-dadd-465c-98c6-0197a59336a9"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.576057 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f822dbf-f355-45f9-b293-f1ab29619ed7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8f822dbf-f355-45f9-b293-f1ab29619ed7" (UID: "8f822dbf-f355-45f9-b293-f1ab29619ed7"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.589168 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-24p95_05009b2f-f136-4f9d-8d58-169816bb1ea6/marketplace-operator/1.log" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.589236 4663 generic.go:334] "Generic (PLEG): container finished" podID="05009b2f-f136-4f9d-8d58-169816bb1ea6" containerID="fb5df3e012f798dbcac14ae5a4897657eee0a8fb7864955e9bb8288f10b80102" exitCode=0 Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.589295 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-24p95" event={"ID":"05009b2f-f136-4f9d-8d58-169816bb1ea6","Type":"ContainerDied","Data":"fb5df3e012f798dbcac14ae5a4897657eee0a8fb7864955e9bb8288f10b80102"} Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.589311 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-24p95" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.589336 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-24p95" event={"ID":"05009b2f-f136-4f9d-8d58-169816bb1ea6","Type":"ContainerDied","Data":"52015d20c4bd3ea97be9512c68b935b2ae9290ae3845b72f175f5bd7a4638424"} Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.589364 4663 scope.go:117] "RemoveContainer" containerID="fb5df3e012f798dbcac14ae5a4897657eee0a8fb7864955e9bb8288f10b80102" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.592079 4663 generic.go:334] "Generic (PLEG): container finished" podID="3b63171f-dadd-465c-98c6-0197a59336a9" containerID="674008b10c25f6ec7be1f81178a5bbb9a30dfb2c960f3943abbecedd370035f7" exitCode=0 Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.592119 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mlbtj" event={"ID":"3b63171f-dadd-465c-98c6-0197a59336a9","Type":"ContainerDied","Data":"674008b10c25f6ec7be1f81178a5bbb9a30dfb2c960f3943abbecedd370035f7"} Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.592144 4663 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-mlbtj" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.592161 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mlbtj" event={"ID":"3b63171f-dadd-465c-98c6-0197a59336a9","Type":"ContainerDied","Data":"e80892a168182d6cd7cfc7ecb53605d2b4e3389551bb6eaaaa54ef1996ba4819"} Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.593785 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-r4j8w" event={"ID":"a6af4642-6ff0-46ed-9ece-b05780059f40","Type":"ContainerStarted","Data":"e8b1d357687ae518a712d651bed1fcbd233aff820ce257e5333a5b539c8ae804"} Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.593844 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-r4j8w" event={"ID":"a6af4642-6ff0-46ed-9ece-b05780059f40","Type":"ContainerStarted","Data":"d2e5827a823de19b26766ccfed0b99f0e653a84aedb76a199a59dfe5e71da1a8"} Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.593927 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-r4j8w" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.595953 4663 generic.go:334] "Generic (PLEG): container finished" podID="b4abc7b2-958e-41fb-9bae-b8dd427179e2" containerID="d1214d98544cf6be7af6172ab70ca47b50dc60e1dac2b372499e6ec622c9524f" exitCode=0 Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.596023 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cblmm" event={"ID":"b4abc7b2-958e-41fb-9bae-b8dd427179e2","Type":"ContainerDied","Data":"d1214d98544cf6be7af6172ab70ca47b50dc60e1dac2b372499e6ec622c9524f"} Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.596058 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cblmm" event={"ID":"b4abc7b2-958e-41fb-9bae-b8dd427179e2","Type":"ContainerDied","Data":"e089992e85b0c452ff2404ea8576fa96b2cfee6a82c5084c7d0d83fb561157e0"} Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.596282 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cblmm" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.596388 4663 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-r4j8w container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.57:8080/healthz\": dial tcp 10.217.0.57:8080: connect: connection refused" start-of-body= Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.596444 4663 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-r4j8w" podUID="a6af4642-6ff0-46ed-9ece-b05780059f40" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.57:8080/healthz\": dial tcp 10.217.0.57:8080: connect: connection refused" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.598752 4663 generic.go:334] "Generic (PLEG): container finished" podID="2d163437-adb7-4c91-b925-72de9fe71120" containerID="fbe76f45b74391fcad78a171da9c6b358c0f3d07deba00d2cc7729fbc917138b" exitCode=0 Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.598762 4663 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-zwkwc" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.598866 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zwkwc" event={"ID":"2d163437-adb7-4c91-b925-72de9fe71120","Type":"ContainerDied","Data":"fbe76f45b74391fcad78a171da9c6b358c0f3d07deba00d2cc7729fbc917138b"} Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.598999 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zwkwc" event={"ID":"2d163437-adb7-4c91-b925-72de9fe71120","Type":"ContainerDied","Data":"1c0ba465bd0a58bea91ab523ddef6169a1eac7ffabea7adcc3ff73e51dfb4780"} Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.602244 4663 generic.go:334] "Generic (PLEG): container finished" podID="8f822dbf-f355-45f9-b293-f1ab29619ed7" containerID="50d8e84816e980eb349665cafcd9336f6e65d956f914b2470e8e0d56b4b2f585" exitCode=0 Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.602277 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p4fsq" event={"ID":"8f822dbf-f355-45f9-b293-f1ab29619ed7","Type":"ContainerDied","Data":"50d8e84816e980eb349665cafcd9336f6e65d956f914b2470e8e0d56b4b2f585"} Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.602314 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p4fsq" event={"ID":"8f822dbf-f355-45f9-b293-f1ab29619ed7","Type":"ContainerDied","Data":"3e90f4614ee3644de5f25432cf2fc9e0bb8ca73c59c2bb7eb230069ea917fb64"} Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.602399 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-p4fsq" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.606431 4663 scope.go:117] "RemoveContainer" containerID="03b032c66008c4ebcbd02dd2cfe1f01f8d51449c094f7df1ad754fbb1d0eff81" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.620757 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-r4j8w" podStartSLOduration=1.620745045 podStartE2EDuration="1.620745045s" podCreationTimestamp="2025-12-11 10:23:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:23:03.619131427 +0000 UTC m=+314.437061211" watchObservedRunningTime="2025-12-11 10:23:03.620745045 +0000 UTC m=+314.438674829" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.623823 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-974kp\" (UniqueName: \"kubernetes.io/projected/3b63171f-dadd-465c-98c6-0197a59336a9-kube-api-access-974kp\") on node \"crc\" DevicePath \"\"" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.623877 4663 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4abc7b2-958e-41fb-9bae-b8dd427179e2-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.623893 4663 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8f822dbf-f355-45f9-b293-f1ab29619ed7-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.623903 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xpccm\" (UniqueName: 
\"kubernetes.io/projected/b4abc7b2-958e-41fb-9bae-b8dd427179e2-kube-api-access-xpccm\") on node \"crc\" DevicePath \"\"" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.623912 4663 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b63171f-dadd-465c-98c6-0197a59336a9-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.623920 4663 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8f822dbf-f355-45f9-b293-f1ab29619ed7-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.623948 4663 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4abc7b2-958e-41fb-9bae-b8dd427179e2-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.623961 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ds4bk\" (UniqueName: \"kubernetes.io/projected/2d163437-adb7-4c91-b925-72de9fe71120-kube-api-access-ds4bk\") on node \"crc\" DevicePath \"\"" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.633989 4663 scope.go:117] "RemoveContainer" containerID="fb5df3e012f798dbcac14ae5a4897657eee0a8fb7864955e9bb8288f10b80102" Dec 11 10:23:03 crc kubenswrapper[4663]: E1211 10:23:03.634423 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fb5df3e012f798dbcac14ae5a4897657eee0a8fb7864955e9bb8288f10b80102\": container with ID starting with fb5df3e012f798dbcac14ae5a4897657eee0a8fb7864955e9bb8288f10b80102 not found: ID does not exist" containerID="fb5df3e012f798dbcac14ae5a4897657eee0a8fb7864955e9bb8288f10b80102" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.634459 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fb5df3e012f798dbcac14ae5a4897657eee0a8fb7864955e9bb8288f10b80102"} err="failed to get container status \"fb5df3e012f798dbcac14ae5a4897657eee0a8fb7864955e9bb8288f10b80102\": rpc error: code = NotFound desc = could not find container \"fb5df3e012f798dbcac14ae5a4897657eee0a8fb7864955e9bb8288f10b80102\": container with ID starting with fb5df3e012f798dbcac14ae5a4897657eee0a8fb7864955e9bb8288f10b80102 not found: ID does not exist" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.634504 4663 scope.go:117] "RemoveContainer" containerID="03b032c66008c4ebcbd02dd2cfe1f01f8d51449c094f7df1ad754fbb1d0eff81" Dec 11 10:23:03 crc kubenswrapper[4663]: E1211 10:23:03.634758 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"03b032c66008c4ebcbd02dd2cfe1f01f8d51449c094f7df1ad754fbb1d0eff81\": container with ID starting with 03b032c66008c4ebcbd02dd2cfe1f01f8d51449c094f7df1ad754fbb1d0eff81 not found: ID does not exist" containerID="03b032c66008c4ebcbd02dd2cfe1f01f8d51449c094f7df1ad754fbb1d0eff81" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.634789 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"03b032c66008c4ebcbd02dd2cfe1f01f8d51449c094f7df1ad754fbb1d0eff81"} err="failed to get container status \"03b032c66008c4ebcbd02dd2cfe1f01f8d51449c094f7df1ad754fbb1d0eff81\": rpc error: code = NotFound desc = could not find container 
\"03b032c66008c4ebcbd02dd2cfe1f01f8d51449c094f7df1ad754fbb1d0eff81\": container with ID starting with 03b032c66008c4ebcbd02dd2cfe1f01f8d51449c094f7df1ad754fbb1d0eff81 not found: ID does not exist" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.634814 4663 scope.go:117] "RemoveContainer" containerID="674008b10c25f6ec7be1f81178a5bbb9a30dfb2c960f3943abbecedd370035f7" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.640545 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-mlbtj"] Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.645984 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-mlbtj"] Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.648412 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2d163437-adb7-4c91-b925-72de9fe71120-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2d163437-adb7-4c91-b925-72de9fe71120" (UID: "2d163437-adb7-4c91-b925-72de9fe71120"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.649405 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-24p95"] Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.650790 4663 scope.go:117] "RemoveContainer" containerID="6bbf25dad89e75e6a742bc74737f94dbb54578cf34f9c05fe5078f0570d665d6" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.653325 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-24p95"] Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.655988 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-cblmm"] Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.658495 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-cblmm"] Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.660897 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-p4fsq"] Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.663031 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-p4fsq"] Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.668834 4663 scope.go:117] "RemoveContainer" containerID="616eb56f042616855a8b1acc6bc564c965fec62d8e8dcb55a2d1cc4bab59e4b6" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.680780 4663 scope.go:117] "RemoveContainer" containerID="674008b10c25f6ec7be1f81178a5bbb9a30dfb2c960f3943abbecedd370035f7" Dec 11 10:23:03 crc kubenswrapper[4663]: E1211 10:23:03.681051 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"674008b10c25f6ec7be1f81178a5bbb9a30dfb2c960f3943abbecedd370035f7\": container with ID starting with 674008b10c25f6ec7be1f81178a5bbb9a30dfb2c960f3943abbecedd370035f7 not found: ID does not exist" containerID="674008b10c25f6ec7be1f81178a5bbb9a30dfb2c960f3943abbecedd370035f7" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.681085 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"674008b10c25f6ec7be1f81178a5bbb9a30dfb2c960f3943abbecedd370035f7"} err="failed to get container status 
\"674008b10c25f6ec7be1f81178a5bbb9a30dfb2c960f3943abbecedd370035f7\": rpc error: code = NotFound desc = could not find container \"674008b10c25f6ec7be1f81178a5bbb9a30dfb2c960f3943abbecedd370035f7\": container with ID starting with 674008b10c25f6ec7be1f81178a5bbb9a30dfb2c960f3943abbecedd370035f7 not found: ID does not exist" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.681111 4663 scope.go:117] "RemoveContainer" containerID="6bbf25dad89e75e6a742bc74737f94dbb54578cf34f9c05fe5078f0570d665d6" Dec 11 10:23:03 crc kubenswrapper[4663]: E1211 10:23:03.681429 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6bbf25dad89e75e6a742bc74737f94dbb54578cf34f9c05fe5078f0570d665d6\": container with ID starting with 6bbf25dad89e75e6a742bc74737f94dbb54578cf34f9c05fe5078f0570d665d6 not found: ID does not exist" containerID="6bbf25dad89e75e6a742bc74737f94dbb54578cf34f9c05fe5078f0570d665d6" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.681463 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6bbf25dad89e75e6a742bc74737f94dbb54578cf34f9c05fe5078f0570d665d6"} err="failed to get container status \"6bbf25dad89e75e6a742bc74737f94dbb54578cf34f9c05fe5078f0570d665d6\": rpc error: code = NotFound desc = could not find container \"6bbf25dad89e75e6a742bc74737f94dbb54578cf34f9c05fe5078f0570d665d6\": container with ID starting with 6bbf25dad89e75e6a742bc74737f94dbb54578cf34f9c05fe5078f0570d665d6 not found: ID does not exist" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.681486 4663 scope.go:117] "RemoveContainer" containerID="616eb56f042616855a8b1acc6bc564c965fec62d8e8dcb55a2d1cc4bab59e4b6" Dec 11 10:23:03 crc kubenswrapper[4663]: E1211 10:23:03.681699 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"616eb56f042616855a8b1acc6bc564c965fec62d8e8dcb55a2d1cc4bab59e4b6\": container with ID starting with 616eb56f042616855a8b1acc6bc564c965fec62d8e8dcb55a2d1cc4bab59e4b6 not found: ID does not exist" containerID="616eb56f042616855a8b1acc6bc564c965fec62d8e8dcb55a2d1cc4bab59e4b6" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.681721 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"616eb56f042616855a8b1acc6bc564c965fec62d8e8dcb55a2d1cc4bab59e4b6"} err="failed to get container status \"616eb56f042616855a8b1acc6bc564c965fec62d8e8dcb55a2d1cc4bab59e4b6\": rpc error: code = NotFound desc = could not find container \"616eb56f042616855a8b1acc6bc564c965fec62d8e8dcb55a2d1cc4bab59e4b6\": container with ID starting with 616eb56f042616855a8b1acc6bc564c965fec62d8e8dcb55a2d1cc4bab59e4b6 not found: ID does not exist" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.681736 4663 scope.go:117] "RemoveContainer" containerID="d1214d98544cf6be7af6172ab70ca47b50dc60e1dac2b372499e6ec622c9524f" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.691318 4663 scope.go:117] "RemoveContainer" containerID="e518db8bb39eb363a88b430652e007bb6f29a0c13c177b5af52047eb6615c06c" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.701693 4663 scope.go:117] "RemoveContainer" containerID="ded69ee3b1c21490ba5c44288cc79dec1c8dad1868c89baac459c59098c0b15b" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.712258 4663 scope.go:117] "RemoveContainer" containerID="d1214d98544cf6be7af6172ab70ca47b50dc60e1dac2b372499e6ec622c9524f" Dec 11 10:23:03 crc 
kubenswrapper[4663]: E1211 10:23:03.713350 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d1214d98544cf6be7af6172ab70ca47b50dc60e1dac2b372499e6ec622c9524f\": container with ID starting with d1214d98544cf6be7af6172ab70ca47b50dc60e1dac2b372499e6ec622c9524f not found: ID does not exist" containerID="d1214d98544cf6be7af6172ab70ca47b50dc60e1dac2b372499e6ec622c9524f" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.713401 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d1214d98544cf6be7af6172ab70ca47b50dc60e1dac2b372499e6ec622c9524f"} err="failed to get container status \"d1214d98544cf6be7af6172ab70ca47b50dc60e1dac2b372499e6ec622c9524f\": rpc error: code = NotFound desc = could not find container \"d1214d98544cf6be7af6172ab70ca47b50dc60e1dac2b372499e6ec622c9524f\": container with ID starting with d1214d98544cf6be7af6172ab70ca47b50dc60e1dac2b372499e6ec622c9524f not found: ID does not exist" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.713430 4663 scope.go:117] "RemoveContainer" containerID="e518db8bb39eb363a88b430652e007bb6f29a0c13c177b5af52047eb6615c06c" Dec 11 10:23:03 crc kubenswrapper[4663]: E1211 10:23:03.713749 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e518db8bb39eb363a88b430652e007bb6f29a0c13c177b5af52047eb6615c06c\": container with ID starting with e518db8bb39eb363a88b430652e007bb6f29a0c13c177b5af52047eb6615c06c not found: ID does not exist" containerID="e518db8bb39eb363a88b430652e007bb6f29a0c13c177b5af52047eb6615c06c" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.713781 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e518db8bb39eb363a88b430652e007bb6f29a0c13c177b5af52047eb6615c06c"} err="failed to get container status \"e518db8bb39eb363a88b430652e007bb6f29a0c13c177b5af52047eb6615c06c\": rpc error: code = NotFound desc = could not find container \"e518db8bb39eb363a88b430652e007bb6f29a0c13c177b5af52047eb6615c06c\": container with ID starting with e518db8bb39eb363a88b430652e007bb6f29a0c13c177b5af52047eb6615c06c not found: ID does not exist" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.713804 4663 scope.go:117] "RemoveContainer" containerID="ded69ee3b1c21490ba5c44288cc79dec1c8dad1868c89baac459c59098c0b15b" Dec 11 10:23:03 crc kubenswrapper[4663]: E1211 10:23:03.714036 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ded69ee3b1c21490ba5c44288cc79dec1c8dad1868c89baac459c59098c0b15b\": container with ID starting with ded69ee3b1c21490ba5c44288cc79dec1c8dad1868c89baac459c59098c0b15b not found: ID does not exist" containerID="ded69ee3b1c21490ba5c44288cc79dec1c8dad1868c89baac459c59098c0b15b" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.714063 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ded69ee3b1c21490ba5c44288cc79dec1c8dad1868c89baac459c59098c0b15b"} err="failed to get container status \"ded69ee3b1c21490ba5c44288cc79dec1c8dad1868c89baac459c59098c0b15b\": rpc error: code = NotFound desc = could not find container \"ded69ee3b1c21490ba5c44288cc79dec1c8dad1868c89baac459c59098c0b15b\": container with ID starting with ded69ee3b1c21490ba5c44288cc79dec1c8dad1868c89baac459c59098c0b15b not found: ID does not exist" Dec 11 10:23:03 crc kubenswrapper[4663]: 
I1211 10:23:03.714085 4663 scope.go:117] "RemoveContainer" containerID="fbe76f45b74391fcad78a171da9c6b358c0f3d07deba00d2cc7729fbc917138b" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.725122 4663 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2d163437-adb7-4c91-b925-72de9fe71120-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.725161 4663 scope.go:117] "RemoveContainer" containerID="85baf9b9531d2056d4d938214bebce24fa3783d64510209a7231e8271512f1dc" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.743275 4663 scope.go:117] "RemoveContainer" containerID="03a5adacffb6b69d3c7f0448489abe42ed6828ee2e2524aa761d2d3e924a20b2" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.757692 4663 scope.go:117] "RemoveContainer" containerID="fbe76f45b74391fcad78a171da9c6b358c0f3d07deba00d2cc7729fbc917138b" Dec 11 10:23:03 crc kubenswrapper[4663]: E1211 10:23:03.758906 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fbe76f45b74391fcad78a171da9c6b358c0f3d07deba00d2cc7729fbc917138b\": container with ID starting with fbe76f45b74391fcad78a171da9c6b358c0f3d07deba00d2cc7729fbc917138b not found: ID does not exist" containerID="fbe76f45b74391fcad78a171da9c6b358c0f3d07deba00d2cc7729fbc917138b" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.758967 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fbe76f45b74391fcad78a171da9c6b358c0f3d07deba00d2cc7729fbc917138b"} err="failed to get container status \"fbe76f45b74391fcad78a171da9c6b358c0f3d07deba00d2cc7729fbc917138b\": rpc error: code = NotFound desc = could not find container \"fbe76f45b74391fcad78a171da9c6b358c0f3d07deba00d2cc7729fbc917138b\": container with ID starting with fbe76f45b74391fcad78a171da9c6b358c0f3d07deba00d2cc7729fbc917138b not found: ID does not exist" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.758993 4663 scope.go:117] "RemoveContainer" containerID="85baf9b9531d2056d4d938214bebce24fa3783d64510209a7231e8271512f1dc" Dec 11 10:23:03 crc kubenswrapper[4663]: E1211 10:23:03.759371 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"85baf9b9531d2056d4d938214bebce24fa3783d64510209a7231e8271512f1dc\": container with ID starting with 85baf9b9531d2056d4d938214bebce24fa3783d64510209a7231e8271512f1dc not found: ID does not exist" containerID="85baf9b9531d2056d4d938214bebce24fa3783d64510209a7231e8271512f1dc" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.759410 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"85baf9b9531d2056d4d938214bebce24fa3783d64510209a7231e8271512f1dc"} err="failed to get container status \"85baf9b9531d2056d4d938214bebce24fa3783d64510209a7231e8271512f1dc\": rpc error: code = NotFound desc = could not find container \"85baf9b9531d2056d4d938214bebce24fa3783d64510209a7231e8271512f1dc\": container with ID starting with 85baf9b9531d2056d4d938214bebce24fa3783d64510209a7231e8271512f1dc not found: ID does not exist" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.759424 4663 scope.go:117] "RemoveContainer" containerID="03a5adacffb6b69d3c7f0448489abe42ed6828ee2e2524aa761d2d3e924a20b2" Dec 11 10:23:03 crc kubenswrapper[4663]: E1211 10:23:03.759765 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc 
error: code = NotFound desc = could not find container \"03a5adacffb6b69d3c7f0448489abe42ed6828ee2e2524aa761d2d3e924a20b2\": container with ID starting with 03a5adacffb6b69d3c7f0448489abe42ed6828ee2e2524aa761d2d3e924a20b2 not found: ID does not exist" containerID="03a5adacffb6b69d3c7f0448489abe42ed6828ee2e2524aa761d2d3e924a20b2" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.759787 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"03a5adacffb6b69d3c7f0448489abe42ed6828ee2e2524aa761d2d3e924a20b2"} err="failed to get container status \"03a5adacffb6b69d3c7f0448489abe42ed6828ee2e2524aa761d2d3e924a20b2\": rpc error: code = NotFound desc = could not find container \"03a5adacffb6b69d3c7f0448489abe42ed6828ee2e2524aa761d2d3e924a20b2\": container with ID starting with 03a5adacffb6b69d3c7f0448489abe42ed6828ee2e2524aa761d2d3e924a20b2 not found: ID does not exist" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.759818 4663 scope.go:117] "RemoveContainer" containerID="50d8e84816e980eb349665cafcd9336f6e65d956f914b2470e8e0d56b4b2f585" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.770059 4663 scope.go:117] "RemoveContainer" containerID="f2bc57cc9591bd870efed63b1fd71c8fa88298d9f81fe5d08b35aa53dab76e37" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.782117 4663 scope.go:117] "RemoveContainer" containerID="3c2bbae7fab8c3c96ddfa5b95e85778053eef81b80831de9512d539e4eed0bbd" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.792134 4663 scope.go:117] "RemoveContainer" containerID="50d8e84816e980eb349665cafcd9336f6e65d956f914b2470e8e0d56b4b2f585" Dec 11 10:23:03 crc kubenswrapper[4663]: E1211 10:23:03.793254 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"50d8e84816e980eb349665cafcd9336f6e65d956f914b2470e8e0d56b4b2f585\": container with ID starting with 50d8e84816e980eb349665cafcd9336f6e65d956f914b2470e8e0d56b4b2f585 not found: ID does not exist" containerID="50d8e84816e980eb349665cafcd9336f6e65d956f914b2470e8e0d56b4b2f585" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.793291 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"50d8e84816e980eb349665cafcd9336f6e65d956f914b2470e8e0d56b4b2f585"} err="failed to get container status \"50d8e84816e980eb349665cafcd9336f6e65d956f914b2470e8e0d56b4b2f585\": rpc error: code = NotFound desc = could not find container \"50d8e84816e980eb349665cafcd9336f6e65d956f914b2470e8e0d56b4b2f585\": container with ID starting with 50d8e84816e980eb349665cafcd9336f6e65d956f914b2470e8e0d56b4b2f585 not found: ID does not exist" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.793315 4663 scope.go:117] "RemoveContainer" containerID="f2bc57cc9591bd870efed63b1fd71c8fa88298d9f81fe5d08b35aa53dab76e37" Dec 11 10:23:03 crc kubenswrapper[4663]: E1211 10:23:03.793592 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f2bc57cc9591bd870efed63b1fd71c8fa88298d9f81fe5d08b35aa53dab76e37\": container with ID starting with f2bc57cc9591bd870efed63b1fd71c8fa88298d9f81fe5d08b35aa53dab76e37 not found: ID does not exist" containerID="f2bc57cc9591bd870efed63b1fd71c8fa88298d9f81fe5d08b35aa53dab76e37" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.793614 4663 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"f2bc57cc9591bd870efed63b1fd71c8fa88298d9f81fe5d08b35aa53dab76e37"} err="failed to get container status \"f2bc57cc9591bd870efed63b1fd71c8fa88298d9f81fe5d08b35aa53dab76e37\": rpc error: code = NotFound desc = could not find container \"f2bc57cc9591bd870efed63b1fd71c8fa88298d9f81fe5d08b35aa53dab76e37\": container with ID starting with f2bc57cc9591bd870efed63b1fd71c8fa88298d9f81fe5d08b35aa53dab76e37 not found: ID does not exist" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.793629 4663 scope.go:117] "RemoveContainer" containerID="3c2bbae7fab8c3c96ddfa5b95e85778053eef81b80831de9512d539e4eed0bbd" Dec 11 10:23:03 crc kubenswrapper[4663]: E1211 10:23:03.793833 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3c2bbae7fab8c3c96ddfa5b95e85778053eef81b80831de9512d539e4eed0bbd\": container with ID starting with 3c2bbae7fab8c3c96ddfa5b95e85778053eef81b80831de9512d539e4eed0bbd not found: ID does not exist" containerID="3c2bbae7fab8c3c96ddfa5b95e85778053eef81b80831de9512d539e4eed0bbd" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.793853 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c2bbae7fab8c3c96ddfa5b95e85778053eef81b80831de9512d539e4eed0bbd"} err="failed to get container status \"3c2bbae7fab8c3c96ddfa5b95e85778053eef81b80831de9512d539e4eed0bbd\": rpc error: code = NotFound desc = could not find container \"3c2bbae7fab8c3c96ddfa5b95e85778053eef81b80831de9512d539e4eed0bbd\": container with ID starting with 3c2bbae7fab8c3c96ddfa5b95e85778053eef81b80831de9512d539e4eed0bbd not found: ID does not exist" Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.920866 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zwkwc"] Dec 11 10:23:03 crc kubenswrapper[4663]: I1211 10:23:03.924143 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-zwkwc"] Dec 11 10:23:04 crc kubenswrapper[4663]: I1211 10:23:04.613987 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-r4j8w" Dec 11 10:23:05 crc kubenswrapper[4663]: I1211 10:23:05.510238 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="05009b2f-f136-4f9d-8d58-169816bb1ea6" path="/var/lib/kubelet/pods/05009b2f-f136-4f9d-8d58-169816bb1ea6/volumes" Dec 11 10:23:05 crc kubenswrapper[4663]: I1211 10:23:05.510927 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d163437-adb7-4c91-b925-72de9fe71120" path="/var/lib/kubelet/pods/2d163437-adb7-4c91-b925-72de9fe71120/volumes" Dec 11 10:23:05 crc kubenswrapper[4663]: I1211 10:23:05.511537 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3b63171f-dadd-465c-98c6-0197a59336a9" path="/var/lib/kubelet/pods/3b63171f-dadd-465c-98c6-0197a59336a9/volumes" Dec 11 10:23:05 crc kubenswrapper[4663]: I1211 10:23:05.512119 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f822dbf-f355-45f9-b293-f1ab29619ed7" path="/var/lib/kubelet/pods/8f822dbf-f355-45f9-b293-f1ab29619ed7/volumes" Dec 11 10:23:05 crc kubenswrapper[4663]: I1211 10:23:05.512709 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b4abc7b2-958e-41fb-9bae-b8dd427179e2" path="/var/lib/kubelet/pods/b4abc7b2-958e-41fb-9bae-b8dd427179e2/volumes" Dec 11 10:23:19 crc kubenswrapper[4663]: I1211 10:23:19.271619 
4663 patch_prober.go:28] interesting pod/machine-config-daemon-mq524 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 11 10:23:19 crc kubenswrapper[4663]: I1211 10:23:19.272139 4663 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 11 10:23:29 crc kubenswrapper[4663]: I1211 10:23:29.762872 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-n6dwq"]
Dec 11 10:23:29 crc kubenswrapper[4663]: I1211 10:23:29.763503 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-n6dwq" podUID="14f276e8-14d3-4e67-956d-ad6212f45c69" containerName="controller-manager" containerID="cri-o://a90d628be04a58f13d73868ef108fa539edffedddcaf0dcaccd3e370533d5e97" gracePeriod=30
Dec 11 10:23:29 crc kubenswrapper[4663]: I1211 10:23:29.871660 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvj6k"]
Dec 11 10:23:29 crc kubenswrapper[4663]: I1211 10:23:29.871843 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvj6k" podUID="c7f23b6b-b7a1-46f4-b553-37be88f8fae0" containerName="route-controller-manager" containerID="cri-o://9193f5babeee3d67a69eea3a5073c716b8f3e77a5b6658c11649300055956d39" gracePeriod=30
Dec 11 10:23:30 crc kubenswrapper[4663]: I1211 10:23:30.081843 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-n6dwq"
Dec 11 10:23:30 crc kubenswrapper[4663]: I1211 10:23:30.167903 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvj6k"
Dec 11 10:23:30 crc kubenswrapper[4663]: I1211 10:23:30.269034 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/14f276e8-14d3-4e67-956d-ad6212f45c69-serving-cert\") pod \"14f276e8-14d3-4e67-956d-ad6212f45c69\" (UID: \"14f276e8-14d3-4e67-956d-ad6212f45c69\") "
Dec 11 10:23:30 crc kubenswrapper[4663]: I1211 10:23:30.269084 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/14f276e8-14d3-4e67-956d-ad6212f45c69-config\") pod \"14f276e8-14d3-4e67-956d-ad6212f45c69\" (UID: \"14f276e8-14d3-4e67-956d-ad6212f45c69\") "
Dec 11 10:23:30 crc kubenswrapper[4663]: I1211 10:23:30.269117 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x6rl2\" (UniqueName: \"kubernetes.io/projected/14f276e8-14d3-4e67-956d-ad6212f45c69-kube-api-access-x6rl2\") pod \"14f276e8-14d3-4e67-956d-ad6212f45c69\" (UID: \"14f276e8-14d3-4e67-956d-ad6212f45c69\") "
Dec 11 10:23:30 crc kubenswrapper[4663]: I1211 10:23:30.269154 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/14f276e8-14d3-4e67-956d-ad6212f45c69-client-ca\") pod \"14f276e8-14d3-4e67-956d-ad6212f45c69\" (UID: \"14f276e8-14d3-4e67-956d-ad6212f45c69\") "
Dec 11 10:23:30 crc kubenswrapper[4663]: I1211 10:23:30.269232 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c7f23b6b-b7a1-46f4-b553-37be88f8fae0-serving-cert\") pod \"c7f23b6b-b7a1-46f4-b553-37be88f8fae0\" (UID: \"c7f23b6b-b7a1-46f4-b553-37be88f8fae0\") "
Dec 11 10:23:30 crc kubenswrapper[4663]: I1211 10:23:30.269272 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jdpvq\" (UniqueName: \"kubernetes.io/projected/c7f23b6b-b7a1-46f4-b553-37be88f8fae0-kube-api-access-jdpvq\") pod \"c7f23b6b-b7a1-46f4-b553-37be88f8fae0\" (UID: \"c7f23b6b-b7a1-46f4-b553-37be88f8fae0\") "
Dec 11 10:23:30 crc kubenswrapper[4663]: I1211 10:23:30.269301 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/14f276e8-14d3-4e67-956d-ad6212f45c69-proxy-ca-bundles\") pod \"14f276e8-14d3-4e67-956d-ad6212f45c69\" (UID: \"14f276e8-14d3-4e67-956d-ad6212f45c69\") "
Dec 11 10:23:30 crc kubenswrapper[4663]: I1211 10:23:30.269342 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c7f23b6b-b7a1-46f4-b553-37be88f8fae0-client-ca\") pod \"c7f23b6b-b7a1-46f4-b553-37be88f8fae0\" (UID: \"c7f23b6b-b7a1-46f4-b553-37be88f8fae0\") "
Dec 11 10:23:30 crc kubenswrapper[4663]: I1211 10:23:30.269364 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c7f23b6b-b7a1-46f4-b553-37be88f8fae0-config\") pod \"c7f23b6b-b7a1-46f4-b553-37be88f8fae0\" (UID: \"c7f23b6b-b7a1-46f4-b553-37be88f8fae0\") "
Dec 11 10:23:30 crc kubenswrapper[4663]: I1211 10:23:30.270798 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/14f276e8-14d3-4e67-956d-ad6212f45c69-client-ca" (OuterVolumeSpecName: "client-ca") pod "14f276e8-14d3-4e67-956d-ad6212f45c69" (UID: "14f276e8-14d3-4e67-956d-ad6212f45c69"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 11 10:23:30 crc kubenswrapper[4663]: I1211 10:23:30.270798 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c7f23b6b-b7a1-46f4-b553-37be88f8fae0-client-ca" (OuterVolumeSpecName: "client-ca") pod "c7f23b6b-b7a1-46f4-b553-37be88f8fae0" (UID: "c7f23b6b-b7a1-46f4-b553-37be88f8fae0"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 11 10:23:30 crc kubenswrapper[4663]: I1211 10:23:30.271102 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/14f276e8-14d3-4e67-956d-ad6212f45c69-config" (OuterVolumeSpecName: "config") pod "14f276e8-14d3-4e67-956d-ad6212f45c69" (UID: "14f276e8-14d3-4e67-956d-ad6212f45c69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 11 10:23:30 crc kubenswrapper[4663]: I1211 10:23:30.271545 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c7f23b6b-b7a1-46f4-b553-37be88f8fae0-config" (OuterVolumeSpecName: "config") pod "c7f23b6b-b7a1-46f4-b553-37be88f8fae0" (UID: "c7f23b6b-b7a1-46f4-b553-37be88f8fae0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 11 10:23:30 crc kubenswrapper[4663]: I1211 10:23:30.271631 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/14f276e8-14d3-4e67-956d-ad6212f45c69-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "14f276e8-14d3-4e67-956d-ad6212f45c69" (UID: "14f276e8-14d3-4e67-956d-ad6212f45c69"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 11 10:23:30 crc kubenswrapper[4663]: I1211 10:23:30.273438 4663 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/14f276e8-14d3-4e67-956d-ad6212f45c69-config\") on node \"crc\" DevicePath \"\""
Dec 11 10:23:30 crc kubenswrapper[4663]: I1211 10:23:30.273462 4663 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/14f276e8-14d3-4e67-956d-ad6212f45c69-client-ca\") on node \"crc\" DevicePath \"\""
Dec 11 10:23:30 crc kubenswrapper[4663]: I1211 10:23:30.273476 4663 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c7f23b6b-b7a1-46f4-b553-37be88f8fae0-client-ca\") on node \"crc\" DevicePath \"\""
Dec 11 10:23:30 crc kubenswrapper[4663]: I1211 10:23:30.276458 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14f276e8-14d3-4e67-956d-ad6212f45c69-kube-api-access-x6rl2" (OuterVolumeSpecName: "kube-api-access-x6rl2") pod "14f276e8-14d3-4e67-956d-ad6212f45c69" (UID: "14f276e8-14d3-4e67-956d-ad6212f45c69"). InnerVolumeSpecName "kube-api-access-x6rl2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 11 10:23:30 crc kubenswrapper[4663]: I1211 10:23:30.276680 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14f276e8-14d3-4e67-956d-ad6212f45c69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "14f276e8-14d3-4e67-956d-ad6212f45c69" (UID: "14f276e8-14d3-4e67-956d-ad6212f45c69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 11 10:23:30 crc kubenswrapper[4663]: I1211 10:23:30.277133 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c7f23b6b-b7a1-46f4-b553-37be88f8fae0-kube-api-access-jdpvq" (OuterVolumeSpecName: "kube-api-access-jdpvq") pod "c7f23b6b-b7a1-46f4-b553-37be88f8fae0" (UID: "c7f23b6b-b7a1-46f4-b553-37be88f8fae0"). InnerVolumeSpecName "kube-api-access-jdpvq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 11 10:23:30 crc kubenswrapper[4663]: I1211 10:23:30.280444 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7f23b6b-b7a1-46f4-b553-37be88f8fae0-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "c7f23b6b-b7a1-46f4-b553-37be88f8fae0" (UID: "c7f23b6b-b7a1-46f4-b553-37be88f8fae0"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 11 10:23:30 crc kubenswrapper[4663]: I1211 10:23:30.375141 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jdpvq\" (UniqueName: \"kubernetes.io/projected/c7f23b6b-b7a1-46f4-b553-37be88f8fae0-kube-api-access-jdpvq\") on node \"crc\" DevicePath \"\""
Dec 11 10:23:30 crc kubenswrapper[4663]: I1211 10:23:30.375221 4663 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/14f276e8-14d3-4e67-956d-ad6212f45c69-proxy-ca-bundles\") on node \"crc\" DevicePath \"\""
Dec 11 10:23:30 crc kubenswrapper[4663]: I1211 10:23:30.375235 4663 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c7f23b6b-b7a1-46f4-b553-37be88f8fae0-config\") on node \"crc\" DevicePath \"\""
Dec 11 10:23:30 crc kubenswrapper[4663]: I1211 10:23:30.375245 4663 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/14f276e8-14d3-4e67-956d-ad6212f45c69-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 11 10:23:30 crc kubenswrapper[4663]: I1211 10:23:30.375257 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x6rl2\" (UniqueName: \"kubernetes.io/projected/14f276e8-14d3-4e67-956d-ad6212f45c69-kube-api-access-x6rl2\") on node \"crc\" DevicePath \"\""
Dec 11 10:23:30 crc kubenswrapper[4663]: I1211 10:23:30.375265 4663 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c7f23b6b-b7a1-46f4-b553-37be88f8fae0-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 11 10:23:30 crc kubenswrapper[4663]: I1211 10:23:30.748573 4663 generic.go:334] "Generic (PLEG): container finished" podID="c7f23b6b-b7a1-46f4-b553-37be88f8fae0" containerID="9193f5babeee3d67a69eea3a5073c716b8f3e77a5b6658c11649300055956d39" exitCode=0
Dec 11 10:23:30 crc kubenswrapper[4663]: I1211 10:23:30.748854 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvj6k" event={"ID":"c7f23b6b-b7a1-46f4-b553-37be88f8fae0","Type":"ContainerDied","Data":"9193f5babeee3d67a69eea3a5073c716b8f3e77a5b6658c11649300055956d39"}
Dec 11 10:23:30 crc kubenswrapper[4663]: I1211 10:23:30.748945 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvj6k" event={"ID":"c7f23b6b-b7a1-46f4-b553-37be88f8fae0","Type":"ContainerDied","Data":"1eca88383e729bb647bdefa3cc8b092b597bcefabd92106fda0d7dd4accbf3ac"}
Dec 11 10:23:30 crc kubenswrapper[4663]: I1211 10:23:30.749012 4663 scope.go:117] "RemoveContainer" containerID="9193f5babeee3d67a69eea3a5073c716b8f3e77a5b6658c11649300055956d39"
Dec 11 10:23:30 crc kubenswrapper[4663]: I1211 10:23:30.749158 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvj6k"
Dec 11 10:23:30 crc kubenswrapper[4663]: I1211 10:23:30.750925 4663 generic.go:334] "Generic (PLEG): container finished" podID="14f276e8-14d3-4e67-956d-ad6212f45c69" containerID="a90d628be04a58f13d73868ef108fa539edffedddcaf0dcaccd3e370533d5e97" exitCode=0
Dec 11 10:23:30 crc kubenswrapper[4663]: I1211 10:23:30.750964 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-n6dwq" event={"ID":"14f276e8-14d3-4e67-956d-ad6212f45c69","Type":"ContainerDied","Data":"a90d628be04a58f13d73868ef108fa539edffedddcaf0dcaccd3e370533d5e97"}
Dec 11 10:23:30 crc kubenswrapper[4663]: I1211 10:23:30.750989 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-n6dwq" event={"ID":"14f276e8-14d3-4e67-956d-ad6212f45c69","Type":"ContainerDied","Data":"4ddcddc85dfecf9912c23f042a2d1e6d6f92b3e656be1bc0d9d90b8e72fcf57a"}
Dec 11 10:23:30 crc kubenswrapper[4663]: I1211 10:23:30.751041 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-n6dwq"
Dec 11 10:23:30 crc kubenswrapper[4663]: I1211 10:23:30.764572 4663 scope.go:117] "RemoveContainer" containerID="9193f5babeee3d67a69eea3a5073c716b8f3e77a5b6658c11649300055956d39"
Dec 11 10:23:30 crc kubenswrapper[4663]: E1211 10:23:30.765258 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9193f5babeee3d67a69eea3a5073c716b8f3e77a5b6658c11649300055956d39\": container with ID starting with 9193f5babeee3d67a69eea3a5073c716b8f3e77a5b6658c11649300055956d39 not found: ID does not exist" containerID="9193f5babeee3d67a69eea3a5073c716b8f3e77a5b6658c11649300055956d39"
Dec 11 10:23:30 crc kubenswrapper[4663]: I1211 10:23:30.765399 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9193f5babeee3d67a69eea3a5073c716b8f3e77a5b6658c11649300055956d39"} err="failed to get container status \"9193f5babeee3d67a69eea3a5073c716b8f3e77a5b6658c11649300055956d39\": rpc error: code = NotFound desc = could not find container \"9193f5babeee3d67a69eea3a5073c716b8f3e77a5b6658c11649300055956d39\": container with ID starting with 9193f5babeee3d67a69eea3a5073c716b8f3e77a5b6658c11649300055956d39 not found: ID does not exist"
Dec 11 10:23:30 crc kubenswrapper[4663]: I1211 10:23:30.765499 4663 scope.go:117] "RemoveContainer" containerID="a90d628be04a58f13d73868ef108fa539edffedddcaf0dcaccd3e370533d5e97"
Dec 11 10:23:30 crc kubenswrapper[4663]: I1211 10:23:30.777278 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-n6dwq"]
Dec 11 10:23:30 crc kubenswrapper[4663]: I1211 10:23:30.779131 4663 scope.go:117] "RemoveContainer" containerID="a90d628be04a58f13d73868ef108fa539edffedddcaf0dcaccd3e370533d5e97"
Dec 11 10:23:30 crc kubenswrapper[4663]: I1211 10:23:30.779946 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-n6dwq"]
Dec 11 10:23:30 crc kubenswrapper[4663]: E1211 10:23:30.779943 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a90d628be04a58f13d73868ef108fa539edffedddcaf0dcaccd3e370533d5e97\": container with ID starting with a90d628be04a58f13d73868ef108fa539edffedddcaf0dcaccd3e370533d5e97 not found: ID does not exist" containerID="a90d628be04a58f13d73868ef108fa539edffedddcaf0dcaccd3e370533d5e97"
Dec 11 10:23:30 crc kubenswrapper[4663]: I1211 10:23:30.780243 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a90d628be04a58f13d73868ef108fa539edffedddcaf0dcaccd3e370533d5e97"} err="failed to get container status \"a90d628be04a58f13d73868ef108fa539edffedddcaf0dcaccd3e370533d5e97\": rpc error: code = NotFound desc = could not find container \"a90d628be04a58f13d73868ef108fa539edffedddcaf0dcaccd3e370533d5e97\": container with ID starting with a90d628be04a58f13d73868ef108fa539edffedddcaf0dcaccd3e370533d5e97 not found: ID does not exist"
Dec 11 10:23:30 crc kubenswrapper[4663]: I1211 10:23:30.787529 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvj6k"]
Dec 11 10:23:30 crc kubenswrapper[4663]: I1211 10:23:30.791373 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvj6k"]
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.510941 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="14f276e8-14d3-4e67-956d-ad6212f45c69" path="/var/lib/kubelet/pods/14f276e8-14d3-4e67-956d-ad6212f45c69/volumes"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.511597 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c7f23b6b-b7a1-46f4-b553-37be88f8fae0" path="/var/lib/kubelet/pods/c7f23b6b-b7a1-46f4-b553-37be88f8fae0/volumes"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.597081 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-598844dbc9-fmphp"]
Dec 11 10:23:31 crc kubenswrapper[4663]: E1211 10:23:31.597320 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7f23b6b-b7a1-46f4-b553-37be88f8fae0" containerName="route-controller-manager"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.597339 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7f23b6b-b7a1-46f4-b553-37be88f8fae0" containerName="route-controller-manager"
Dec 11 10:23:31 crc kubenswrapper[4663]: E1211 10:23:31.597351 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f822dbf-f355-45f9-b293-f1ab29619ed7" containerName="extract-content"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.597357 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f822dbf-f355-45f9-b293-f1ab29619ed7" containerName="extract-content"
Dec 11 10:23:31 crc kubenswrapper[4663]: E1211 10:23:31.597365 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14f276e8-14d3-4e67-956d-ad6212f45c69" containerName="controller-manager"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.597371 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="14f276e8-14d3-4e67-956d-ad6212f45c69" containerName="controller-manager"
Dec 11 10:23:31 crc kubenswrapper[4663]: E1211 10:23:31.597378 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b63171f-dadd-465c-98c6-0197a59336a9" containerName="registry-server"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.597384 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b63171f-dadd-465c-98c6-0197a59336a9" containerName="registry-server"
Dec 11 10:23:31 crc kubenswrapper[4663]: E1211 10:23:31.597392 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d163437-adb7-4c91-b925-72de9fe71120" containerName="registry-server"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.597397 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d163437-adb7-4c91-b925-72de9fe71120" containerName="registry-server"
Dec 11 10:23:31 crc kubenswrapper[4663]: E1211 10:23:31.597404 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4abc7b2-958e-41fb-9bae-b8dd427179e2" containerName="extract-utilities"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.597410 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4abc7b2-958e-41fb-9bae-b8dd427179e2" containerName="extract-utilities"
Dec 11 10:23:31 crc kubenswrapper[4663]: E1211 10:23:31.597417 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05009b2f-f136-4f9d-8d58-169816bb1ea6" containerName="marketplace-operator"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.597422 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="05009b2f-f136-4f9d-8d58-169816bb1ea6" containerName="marketplace-operator"
Dec 11 10:23:31 crc kubenswrapper[4663]: E1211 10:23:31.597429 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d163437-adb7-4c91-b925-72de9fe71120" containerName="extract-content"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.597435 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d163437-adb7-4c91-b925-72de9fe71120" containerName="extract-content"
Dec 11 10:23:31 crc kubenswrapper[4663]: E1211 10:23:31.597443 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4abc7b2-958e-41fb-9bae-b8dd427179e2" containerName="extract-content"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.597448 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4abc7b2-958e-41fb-9bae-b8dd427179e2" containerName="extract-content"
Dec 11 10:23:31 crc kubenswrapper[4663]: E1211 10:23:31.597458 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05009b2f-f136-4f9d-8d58-169816bb1ea6" containerName="marketplace-operator"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.597463 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="05009b2f-f136-4f9d-8d58-169816bb1ea6" containerName="marketplace-operator"
Dec 11 10:23:31 crc kubenswrapper[4663]: E1211 10:23:31.597469 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4abc7b2-958e-41fb-9bae-b8dd427179e2" containerName="registry-server"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.597474 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4abc7b2-958e-41fb-9bae-b8dd427179e2" containerName="registry-server"
Dec 11 10:23:31 crc kubenswrapper[4663]: E1211 10:23:31.597481 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f822dbf-f355-45f9-b293-f1ab29619ed7" containerName="registry-server"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.597486 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f822dbf-f355-45f9-b293-f1ab29619ed7" containerName="registry-server"
Dec 11 10:23:31 crc kubenswrapper[4663]: E1211 10:23:31.597494 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b63171f-dadd-465c-98c6-0197a59336a9" containerName="extract-utilities"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.597501 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b63171f-dadd-465c-98c6-0197a59336a9" containerName="extract-utilities"
Dec 11 10:23:31 crc kubenswrapper[4663]: E1211 10:23:31.597510 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b63171f-dadd-465c-98c6-0197a59336a9" containerName="extract-content"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.597515 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b63171f-dadd-465c-98c6-0197a59336a9" containerName="extract-content"
Dec 11 10:23:31 crc kubenswrapper[4663]: E1211 10:23:31.597522 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d163437-adb7-4c91-b925-72de9fe71120" containerName="extract-utilities"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.597527 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d163437-adb7-4c91-b925-72de9fe71120" containerName="extract-utilities"
Dec 11 10:23:31 crc kubenswrapper[4663]: E1211 10:23:31.597533 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f822dbf-f355-45f9-b293-f1ab29619ed7" containerName="extract-utilities"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.597537 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f822dbf-f355-45f9-b293-f1ab29619ed7" containerName="extract-utilities"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.597634 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="05009b2f-f136-4f9d-8d58-169816bb1ea6" containerName="marketplace-operator"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.597645 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="05009b2f-f136-4f9d-8d58-169816bb1ea6" containerName="marketplace-operator"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.597652 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d163437-adb7-4c91-b925-72de9fe71120" containerName="registry-server"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.597660 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="14f276e8-14d3-4e67-956d-ad6212f45c69" containerName="controller-manager"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.597666 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4abc7b2-958e-41fb-9bae-b8dd427179e2" containerName="registry-server"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.597673 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="8f822dbf-f355-45f9-b293-f1ab29619ed7" containerName="registry-server"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.597681 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="3b63171f-dadd-465c-98c6-0197a59336a9" containerName="registry-server"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.597688 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="c7f23b6b-b7a1-46f4-b553-37be88f8fae0" containerName="route-controller-manager"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.598035 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-598844dbc9-fmphp"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.599555 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.599636 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.599824 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.600408 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.600605 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.600628 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.601638 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7d7765684b-mb2c9"]
Dec 11 10:23:31 crc kubenswrapper[4663]: E1211 10:23:31.602640 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05009b2f-f136-4f9d-8d58-169816bb1ea6" containerName="marketplace-operator"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.602661 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="05009b2f-f136-4f9d-8d58-169816bb1ea6" containerName="marketplace-operator"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.602765 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="05009b2f-f136-4f9d-8d58-169816bb1ea6" containerName="marketplace-operator"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.603328 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7d7765684b-mb2c9"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.604522 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.606566 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.606602 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.606809 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.607107 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.607310 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.607659 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.607821 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7d7765684b-mb2c9"]
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.610432 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-598844dbc9-fmphp"]
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.791057 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1d6731bd-a91c-4908-bb1e-0a6dfeea2562-config\") pod \"controller-manager-598844dbc9-fmphp\" (UID: \"1d6731bd-a91c-4908-bb1e-0a6dfeea2562\") " pod="openshift-controller-manager/controller-manager-598844dbc9-fmphp"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.791107 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1d6731bd-a91c-4908-bb1e-0a6dfeea2562-serving-cert\") pod \"controller-manager-598844dbc9-fmphp\" (UID: \"1d6731bd-a91c-4908-bb1e-0a6dfeea2562\") " pod="openshift-controller-manager/controller-manager-598844dbc9-fmphp"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.791141 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/257fa002-0e2b-40b7-a8fb-ce5ffc0e5767-serving-cert\") pod \"route-controller-manager-7d7765684b-mb2c9\" (UID: \"257fa002-0e2b-40b7-a8fb-ce5ffc0e5767\") " pod="openshift-route-controller-manager/route-controller-manager-7d7765684b-mb2c9"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.791439 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/257fa002-0e2b-40b7-a8fb-ce5ffc0e5767-config\") pod \"route-controller-manager-7d7765684b-mb2c9\" (UID: \"257fa002-0e2b-40b7-a8fb-ce5ffc0e5767\") " pod="openshift-route-controller-manager/route-controller-manager-7d7765684b-mb2c9"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.791459 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1d6731bd-a91c-4908-bb1e-0a6dfeea2562-client-ca\") pod \"controller-manager-598844dbc9-fmphp\" (UID: \"1d6731bd-a91c-4908-bb1e-0a6dfeea2562\") " pod="openshift-controller-manager/controller-manager-598844dbc9-fmphp"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.791492 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1d6731bd-a91c-4908-bb1e-0a6dfeea2562-proxy-ca-bundles\") pod \"controller-manager-598844dbc9-fmphp\" (UID: \"1d6731bd-a91c-4908-bb1e-0a6dfeea2562\") " pod="openshift-controller-manager/controller-manager-598844dbc9-fmphp"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.791508 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/257fa002-0e2b-40b7-a8fb-ce5ffc0e5767-client-ca\") pod \"route-controller-manager-7d7765684b-mb2c9\" (UID: \"257fa002-0e2b-40b7-a8fb-ce5ffc0e5767\") " pod="openshift-route-controller-manager/route-controller-manager-7d7765684b-mb2c9"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.791526 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n7rrz\" (UniqueName: \"kubernetes.io/projected/1d6731bd-a91c-4908-bb1e-0a6dfeea2562-kube-api-access-n7rrz\") pod \"controller-manager-598844dbc9-fmphp\" (UID: \"1d6731bd-a91c-4908-bb1e-0a6dfeea2562\") " pod="openshift-controller-manager/controller-manager-598844dbc9-fmphp"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.791549 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xn9wp\" (UniqueName: \"kubernetes.io/projected/257fa002-0e2b-40b7-a8fb-ce5ffc0e5767-kube-api-access-xn9wp\") pod \"route-controller-manager-7d7765684b-mb2c9\" (UID: \"257fa002-0e2b-40b7-a8fb-ce5ffc0e5767\") " pod="openshift-route-controller-manager/route-controller-manager-7d7765684b-mb2c9"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.892913 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1d6731bd-a91c-4908-bb1e-0a6dfeea2562-proxy-ca-bundles\") pod \"controller-manager-598844dbc9-fmphp\" (UID: \"1d6731bd-a91c-4908-bb1e-0a6dfeea2562\") " pod="openshift-controller-manager/controller-manager-598844dbc9-fmphp"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.892971 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/257fa002-0e2b-40b7-a8fb-ce5ffc0e5767-client-ca\") pod \"route-controller-manager-7d7765684b-mb2c9\" (UID: \"257fa002-0e2b-40b7-a8fb-ce5ffc0e5767\") " pod="openshift-route-controller-manager/route-controller-manager-7d7765684b-mb2c9"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.893002 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n7rrz\" (UniqueName: \"kubernetes.io/projected/1d6731bd-a91c-4908-bb1e-0a6dfeea2562-kube-api-access-n7rrz\") pod \"controller-manager-598844dbc9-fmphp\" (UID: \"1d6731bd-a91c-4908-bb1e-0a6dfeea2562\") " pod="openshift-controller-manager/controller-manager-598844dbc9-fmphp"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.893065 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xn9wp\" (UniqueName: \"kubernetes.io/projected/257fa002-0e2b-40b7-a8fb-ce5ffc0e5767-kube-api-access-xn9wp\") pod \"route-controller-manager-7d7765684b-mb2c9\" (UID: \"257fa002-0e2b-40b7-a8fb-ce5ffc0e5767\") " pod="openshift-route-controller-manager/route-controller-manager-7d7765684b-mb2c9"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.893100 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1d6731bd-a91c-4908-bb1e-0a6dfeea2562-config\") pod \"controller-manager-598844dbc9-fmphp\" (UID: \"1d6731bd-a91c-4908-bb1e-0a6dfeea2562\") " pod="openshift-controller-manager/controller-manager-598844dbc9-fmphp"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.893148 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1d6731bd-a91c-4908-bb1e-0a6dfeea2562-serving-cert\") pod \"controller-manager-598844dbc9-fmphp\" (UID: \"1d6731bd-a91c-4908-bb1e-0a6dfeea2562\") " pod="openshift-controller-manager/controller-manager-598844dbc9-fmphp"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.893216 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/257fa002-0e2b-40b7-a8fb-ce5ffc0e5767-serving-cert\") pod \"route-controller-manager-7d7765684b-mb2c9\" (UID: \"257fa002-0e2b-40b7-a8fb-ce5ffc0e5767\") " pod="openshift-route-controller-manager/route-controller-manager-7d7765684b-mb2c9"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.893265 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/257fa002-0e2b-40b7-a8fb-ce5ffc0e5767-config\") pod \"route-controller-manager-7d7765684b-mb2c9\" (UID: \"257fa002-0e2b-40b7-a8fb-ce5ffc0e5767\") " pod="openshift-route-controller-manager/route-controller-manager-7d7765684b-mb2c9"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.893295 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1d6731bd-a91c-4908-bb1e-0a6dfeea2562-client-ca\") pod \"controller-manager-598844dbc9-fmphp\" (UID: \"1d6731bd-a91c-4908-bb1e-0a6dfeea2562\") " pod="openshift-controller-manager/controller-manager-598844dbc9-fmphp"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.894468 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/257fa002-0e2b-40b7-a8fb-ce5ffc0e5767-client-ca\") pod \"route-controller-manager-7d7765684b-mb2c9\" (UID: \"257fa002-0e2b-40b7-a8fb-ce5ffc0e5767\") " pod="openshift-route-controller-manager/route-controller-manager-7d7765684b-mb2c9"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.894629 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1d6731bd-a91c-4908-bb1e-0a6dfeea2562-client-ca\") pod \"controller-manager-598844dbc9-fmphp\" (UID: \"1d6731bd-a91c-4908-bb1e-0a6dfeea2562\") " pod="openshift-controller-manager/controller-manager-598844dbc9-fmphp"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.895056 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1d6731bd-a91c-4908-bb1e-0a6dfeea2562-config\") pod \"controller-manager-598844dbc9-fmphp\" (UID: \"1d6731bd-a91c-4908-bb1e-0a6dfeea2562\") " pod="openshift-controller-manager/controller-manager-598844dbc9-fmphp"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.895065 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/257fa002-0e2b-40b7-a8fb-ce5ffc0e5767-config\") pod \"route-controller-manager-7d7765684b-mb2c9\" (UID: \"257fa002-0e2b-40b7-a8fb-ce5ffc0e5767\") " pod="openshift-route-controller-manager/route-controller-manager-7d7765684b-mb2c9"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.895218 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1d6731bd-a91c-4908-bb1e-0a6dfeea2562-proxy-ca-bundles\") pod \"controller-manager-598844dbc9-fmphp\" (UID: \"1d6731bd-a91c-4908-bb1e-0a6dfeea2562\") " pod="openshift-controller-manager/controller-manager-598844dbc9-fmphp"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.899022 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/257fa002-0e2b-40b7-a8fb-ce5ffc0e5767-serving-cert\") pod \"route-controller-manager-7d7765684b-mb2c9\" (UID: \"257fa002-0e2b-40b7-a8fb-ce5ffc0e5767\") " pod="openshift-route-controller-manager/route-controller-manager-7d7765684b-mb2c9"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.899054 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1d6731bd-a91c-4908-bb1e-0a6dfeea2562-serving-cert\") pod \"controller-manager-598844dbc9-fmphp\" (UID: \"1d6731bd-a91c-4908-bb1e-0a6dfeea2562\") " pod="openshift-controller-manager/controller-manager-598844dbc9-fmphp"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.910059 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xn9wp\" (UniqueName: \"kubernetes.io/projected/257fa002-0e2b-40b7-a8fb-ce5ffc0e5767-kube-api-access-xn9wp\") pod \"route-controller-manager-7d7765684b-mb2c9\" (UID: \"257fa002-0e2b-40b7-a8fb-ce5ffc0e5767\") " pod="openshift-route-controller-manager/route-controller-manager-7d7765684b-mb2c9"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.910513 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n7rrz\" (UniqueName: \"kubernetes.io/projected/1d6731bd-a91c-4908-bb1e-0a6dfeea2562-kube-api-access-n7rrz\") pod \"controller-manager-598844dbc9-fmphp\" (UID: \"1d6731bd-a91c-4908-bb1e-0a6dfeea2562\") " pod="openshift-controller-manager/controller-manager-598844dbc9-fmphp"
Dec 11 10:23:31 crc kubenswrapper[4663]: I1211 10:23:31.917362 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7d7765684b-mb2c9"
Dec 11 10:23:32 crc kubenswrapper[4663]: I1211 10:23:32.061108 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7d7765684b-mb2c9"]
Dec 11 10:23:32 crc kubenswrapper[4663]: I1211 10:23:32.209857 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-598844dbc9-fmphp"
Dec 11 10:23:32 crc kubenswrapper[4663]: I1211 10:23:32.359046 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-598844dbc9-fmphp"]
Dec 11 10:23:32 crc kubenswrapper[4663]: W1211 10:23:32.366667 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1d6731bd_a91c_4908_bb1e_0a6dfeea2562.slice/crio-b91f2f6abad9471e7589e7fb0f958278b47946542159d53a646ed76b9b61ae38 WatchSource:0}: Error finding container b91f2f6abad9471e7589e7fb0f958278b47946542159d53a646ed76b9b61ae38: Status 404 returned error can't find the container with id b91f2f6abad9471e7589e7fb0f958278b47946542159d53a646ed76b9b61ae38
Dec 11 10:23:32 crc kubenswrapper[4663]: I1211 10:23:32.763138 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7d7765684b-mb2c9" event={"ID":"257fa002-0e2b-40b7-a8fb-ce5ffc0e5767","Type":"ContainerStarted","Data":"075a9d4d53a48940a92306c3bf9f6c04121c7eada16655c4ccc7120ee1447a24"}
Dec 11 10:23:32 crc kubenswrapper[4663]: I1211 10:23:32.763459 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7d7765684b-mb2c9" event={"ID":"257fa002-0e2b-40b7-a8fb-ce5ffc0e5767","Type":"ContainerStarted","Data":"9426bfe65e403ba28225b86d8d5717985c2f03b7e1ef9952466414ab6827ca45"}
Dec 11 10:23:32 crc kubenswrapper[4663]: I1211 10:23:32.763481 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-7d7765684b-mb2c9"
Dec 11 10:23:32 crc kubenswrapper[4663]: I1211 10:23:32.765217 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-598844dbc9-fmphp" event={"ID":"1d6731bd-a91c-4908-bb1e-0a6dfeea2562","Type":"ContainerStarted","Data":"410a94f4529f94ea42906cb7ebd604435cbfb1fa97ae84d6f2a12a6ef3272e76"}
Dec 11 10:23:32 crc kubenswrapper[4663]: I1211 10:23:32.765244 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-598844dbc9-fmphp" event={"ID":"1d6731bd-a91c-4908-bb1e-0a6dfeea2562","Type":"ContainerStarted","Data":"b91f2f6abad9471e7589e7fb0f958278b47946542159d53a646ed76b9b61ae38"}
Dec 11 10:23:32 crc kubenswrapper[4663]: I1211 10:23:32.765465 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-598844dbc9-fmphp"
Dec 11 10:23:32 crc kubenswrapper[4663]: I1211 10:23:32.767939 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-7d7765684b-mb2c9"
Dec 11 10:23:32 crc kubenswrapper[4663]: I1211 10:23:32.768792 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-598844dbc9-fmphp"
Dec 11 10:23:32 crc kubenswrapper[4663]: I1211 10:23:32.791954 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-7d7765684b-mb2c9" podStartSLOduration=3.791937497 podStartE2EDuration="3.791937497s" podCreationTimestamp="2025-12-11 10:23:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:23:32.777377677 +0000 UTC m=+343.595307451" watchObservedRunningTime="2025-12-11 10:23:32.791937497 +0000 UTC m=+343.609867282"
Dec 11 10:23:32 crc kubenswrapper[4663]: I1211 10:23:32.809500 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-598844dbc9-fmphp" podStartSLOduration=3.809482862 podStartE2EDuration="3.809482862s" podCreationTimestamp="2025-12-11 10:23:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:23:32.80710765 +0000 UTC m=+343.625037434" watchObservedRunningTime="2025-12-11 10:23:32.809482862 +0000 UTC m=+343.627412646"
Dec 11 10:23:49 crc kubenswrapper[4663]: I1211 10:23:49.271630 4663 patch_prober.go:28] interesting pod/machine-config-daemon-mq524 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 11 10:23:49 crc kubenswrapper[4663]: I1211 10:23:49.272027 4663 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 11 10:23:49 crc kubenswrapper[4663]: I1211 10:23:49.759509 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-598844dbc9-fmphp"]
Dec 11 10:23:49 crc kubenswrapper[4663]: I1211 10:23:49.759702 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-598844dbc9-fmphp" podUID="1d6731bd-a91c-4908-bb1e-0a6dfeea2562" containerName="controller-manager" containerID="cri-o://410a94f4529f94ea42906cb7ebd604435cbfb1fa97ae84d6f2a12a6ef3272e76" gracePeriod=30
Dec 11 10:23:50 crc kubenswrapper[4663]: I1211 10:23:50.286574 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-598844dbc9-fmphp"
Dec 11 10:23:50 crc kubenswrapper[4663]: I1211 10:23:50.433547 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1d6731bd-a91c-4908-bb1e-0a6dfeea2562-config\") pod \"1d6731bd-a91c-4908-bb1e-0a6dfeea2562\" (UID: \"1d6731bd-a91c-4908-bb1e-0a6dfeea2562\") "
Dec 11 10:23:50 crc kubenswrapper[4663]: I1211 10:23:50.433605 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1d6731bd-a91c-4908-bb1e-0a6dfeea2562-serving-cert\") pod \"1d6731bd-a91c-4908-bb1e-0a6dfeea2562\" (UID: \"1d6731bd-a91c-4908-bb1e-0a6dfeea2562\") "
Dec 11 10:23:50 crc kubenswrapper[4663]: I1211 10:23:50.433652 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1d6731bd-a91c-4908-bb1e-0a6dfeea2562-proxy-ca-bundles\") pod \"1d6731bd-a91c-4908-bb1e-0a6dfeea2562\" (UID: \"1d6731bd-a91c-4908-bb1e-0a6dfeea2562\") "
Dec 11 10:23:50 crc kubenswrapper[4663]: I1211 10:23:50.433708 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1d6731bd-a91c-4908-bb1e-0a6dfeea2562-client-ca\") pod \"1d6731bd-a91c-4908-bb1e-0a6dfeea2562\" (UID: \"1d6731bd-a91c-4908-bb1e-0a6dfeea2562\") "
Dec 11 10:23:50 crc kubenswrapper[4663]: I1211 10:23:50.433795 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n7rrz\" (UniqueName: \"kubernetes.io/projected/1d6731bd-a91c-4908-bb1e-0a6dfeea2562-kube-api-access-n7rrz\") pod \"1d6731bd-a91c-4908-bb1e-0a6dfeea2562\" (UID: \"1d6731bd-a91c-4908-bb1e-0a6dfeea2562\") "
Dec 11 10:23:50 crc kubenswrapper[4663]: I1211 10:23:50.434443 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1d6731bd-a91c-4908-bb1e-0a6dfeea2562-client-ca" (OuterVolumeSpecName: "client-ca") pod "1d6731bd-a91c-4908-bb1e-0a6dfeea2562" (UID: "1d6731bd-a91c-4908-bb1e-0a6dfeea2562"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 11 10:23:50 crc kubenswrapper[4663]: I1211 10:23:50.434480 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1d6731bd-a91c-4908-bb1e-0a6dfeea2562-config" (OuterVolumeSpecName: "config") pod "1d6731bd-a91c-4908-bb1e-0a6dfeea2562" (UID: "1d6731bd-a91c-4908-bb1e-0a6dfeea2562"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 11 10:23:50 crc kubenswrapper[4663]: I1211 10:23:50.434434 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1d6731bd-a91c-4908-bb1e-0a6dfeea2562-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "1d6731bd-a91c-4908-bb1e-0a6dfeea2562" (UID: "1d6731bd-a91c-4908-bb1e-0a6dfeea2562"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 11 10:23:50 crc kubenswrapper[4663]: I1211 10:23:50.440402 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d6731bd-a91c-4908-bb1e-0a6dfeea2562-kube-api-access-n7rrz" (OuterVolumeSpecName: "kube-api-access-n7rrz") pod "1d6731bd-a91c-4908-bb1e-0a6dfeea2562" (UID: "1d6731bd-a91c-4908-bb1e-0a6dfeea2562"). InnerVolumeSpecName "kube-api-access-n7rrz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 11 10:23:50 crc kubenswrapper[4663]: I1211 10:23:50.441405 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d6731bd-a91c-4908-bb1e-0a6dfeea2562-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1d6731bd-a91c-4908-bb1e-0a6dfeea2562" (UID: "1d6731bd-a91c-4908-bb1e-0a6dfeea2562"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 11 10:23:50 crc kubenswrapper[4663]: I1211 10:23:50.535735 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n7rrz\" (UniqueName: \"kubernetes.io/projected/1d6731bd-a91c-4908-bb1e-0a6dfeea2562-kube-api-access-n7rrz\") on node \"crc\" DevicePath \"\""
Dec 11 10:23:50 crc kubenswrapper[4663]: I1211 10:23:50.535772 4663 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1d6731bd-a91c-4908-bb1e-0a6dfeea2562-config\") on node \"crc\" DevicePath \"\""
Dec 11 10:23:50 crc kubenswrapper[4663]: I1211 10:23:50.535784 4663 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1d6731bd-a91c-4908-bb1e-0a6dfeea2562-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 11 10:23:50 crc kubenswrapper[4663]: I1211 10:23:50.535797 4663 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1d6731bd-a91c-4908-bb1e-0a6dfeea2562-proxy-ca-bundles\") on node \"crc\" DevicePath \"\""
Dec 11 10:23:50 crc kubenswrapper[4663]: I1211 10:23:50.535805 4663 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1d6731bd-a91c-4908-bb1e-0a6dfeea2562-client-ca\") on node \"crc\" DevicePath \"\""
Dec 11 10:23:50 crc kubenswrapper[4663]: I1211 10:23:50.853908 4663 generic.go:334] "Generic (PLEG): container finished" podID="1d6731bd-a91c-4908-bb1e-0a6dfeea2562" containerID="410a94f4529f94ea42906cb7ebd604435cbfb1fa97ae84d6f2a12a6ef3272e76" exitCode=0
Dec 11 10:23:50 crc kubenswrapper[4663]: I1211 10:23:50.853953 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-598844dbc9-fmphp" event={"ID":"1d6731bd-a91c-4908-bb1e-0a6dfeea2562","Type":"ContainerDied","Data":"410a94f4529f94ea42906cb7ebd604435cbfb1fa97ae84d6f2a12a6ef3272e76"}
Dec 11 10:23:50 crc kubenswrapper[4663]: I1211 10:23:50.853981 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-598844dbc9-fmphp" event={"ID":"1d6731bd-a91c-4908-bb1e-0a6dfeea2562","Type":"ContainerDied","Data":"b91f2f6abad9471e7589e7fb0f958278b47946542159d53a646ed76b9b61ae38"}
Dec 11 10:23:50 crc kubenswrapper[4663]: I1211 10:23:50.853997 4663 scope.go:117] "RemoveContainer" containerID="410a94f4529f94ea42906cb7ebd604435cbfb1fa97ae84d6f2a12a6ef3272e76"
Dec 11 10:23:50 crc kubenswrapper[4663]: I1211 10:23:50.853957 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-598844dbc9-fmphp"
Dec 11 10:23:50 crc kubenswrapper[4663]: I1211 10:23:50.866938 4663 scope.go:117] "RemoveContainer" containerID="410a94f4529f94ea42906cb7ebd604435cbfb1fa97ae84d6f2a12a6ef3272e76"
Dec 11 10:23:50 crc kubenswrapper[4663]: E1211 10:23:50.867515 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"410a94f4529f94ea42906cb7ebd604435cbfb1fa97ae84d6f2a12a6ef3272e76\": container with ID starting with 410a94f4529f94ea42906cb7ebd604435cbfb1fa97ae84d6f2a12a6ef3272e76 not found: ID does not exist" containerID="410a94f4529f94ea42906cb7ebd604435cbfb1fa97ae84d6f2a12a6ef3272e76"
Dec 11 10:23:50 crc kubenswrapper[4663]: I1211 10:23:50.867551 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"410a94f4529f94ea42906cb7ebd604435cbfb1fa97ae84d6f2a12a6ef3272e76"} err="failed to get container status \"410a94f4529f94ea42906cb7ebd604435cbfb1fa97ae84d6f2a12a6ef3272e76\": rpc error: code = NotFound desc = could not find container \"410a94f4529f94ea42906cb7ebd604435cbfb1fa97ae84d6f2a12a6ef3272e76\": container with ID starting with 410a94f4529f94ea42906cb7ebd604435cbfb1fa97ae84d6f2a12a6ef3272e76 not found: ID does not exist"
Dec 11 10:23:50 crc kubenswrapper[4663]: I1211 10:23:50.873320 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-598844dbc9-fmphp"]
Dec 11 10:23:50 crc kubenswrapper[4663]: I1211 10:23:50.875899 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-598844dbc9-fmphp"]
Dec 11 10:23:51 crc kubenswrapper[4663]: I1211 10:23:51.511202 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d6731bd-a91c-4908-bb1e-0a6dfeea2562" path="/var/lib/kubelet/pods/1d6731bd-a91c-4908-bb1e-0a6dfeea2562/volumes"
Dec 11 10:23:51 crc kubenswrapper[4663]: I1211 10:23:51.606037 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-899ddf568-62ppd"]
Dec 11 10:23:51 crc kubenswrapper[4663]: E1211 10:23:51.606228 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d6731bd-a91c-4908-bb1e-0a6dfeea2562" containerName="controller-manager"
Dec 11 10:23:51 crc kubenswrapper[4663]: I1211 10:23:51.606242 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d6731bd-a91c-4908-bb1e-0a6dfeea2562" containerName="controller-manager"
Dec 11 10:23:51 crc kubenswrapper[4663]: I1211 10:23:51.606319 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d6731bd-a91c-4908-bb1e-0a6dfeea2562" containerName="controller-manager"
Dec 11 10:23:51 crc kubenswrapper[4663]: I1211 10:23:51.606610 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-899ddf568-62ppd"
Dec 11 10:23:51 crc kubenswrapper[4663]: I1211 10:23:51.608346 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Dec 11 10:23:51 crc kubenswrapper[4663]: I1211 10:23:51.608633 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Dec 11 10:23:51 crc kubenswrapper[4663]: I1211 10:23:51.608797 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Dec 11 10:23:51 crc kubenswrapper[4663]: I1211 10:23:51.608828 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Dec 11 10:23:51 crc kubenswrapper[4663]: I1211 10:23:51.608909 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Dec 11 10:23:51 crc kubenswrapper[4663]: I1211 10:23:51.610094 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Dec 11 10:23:51 crc kubenswrapper[4663]: I1211 10:23:51.615613 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-899ddf568-62ppd"]
Dec 11 10:23:51 crc kubenswrapper[4663]: I1211 10:23:51.617170 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Dec 11 10:23:51 crc kubenswrapper[4663]: I1211 10:23:51.751548 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/78f9cddd-a5bb-4dcf-a4a4-3acc621b9ebf-serving-cert\") pod \"controller-manager-899ddf568-62ppd\" (UID: \"78f9cddd-a5bb-4dcf-a4a4-3acc621b9ebf\") " pod="openshift-controller-manager/controller-manager-899ddf568-62ppd"
Dec 11 10:23:51 crc kubenswrapper[4663]: I1211 10:23:51.751605 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/78f9cddd-a5bb-4dcf-a4a4-3acc621b9ebf-config\") pod \"controller-manager-899ddf568-62ppd\" (UID: \"78f9cddd-a5bb-4dcf-a4a4-3acc621b9ebf\") " pod="openshift-controller-manager/controller-manager-899ddf568-62ppd"
Dec 11 10:23:51 crc kubenswrapper[4663]: I1211 10:23:51.751644 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cvxb8\" (UniqueName: \"kubernetes.io/projected/78f9cddd-a5bb-4dcf-a4a4-3acc621b9ebf-kube-api-access-cvxb8\") pod \"controller-manager-899ddf568-62ppd\" (UID: \"78f9cddd-a5bb-4dcf-a4a4-3acc621b9ebf\") " pod="openshift-controller-manager/controller-manager-899ddf568-62ppd"
Dec 11 10:23:51 crc kubenswrapper[4663]: I1211 10:23:51.751733 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/78f9cddd-a5bb-4dcf-a4a4-3acc621b9ebf-proxy-ca-bundles\") pod \"controller-manager-899ddf568-62ppd\" (UID: \"78f9cddd-a5bb-4dcf-a4a4-3acc621b9ebf\") " pod="openshift-controller-manager/controller-manager-899ddf568-62ppd"
Dec 11 10:23:51 crc kubenswrapper[4663]: I1211 10:23:51.751798 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/78f9cddd-a5bb-4dcf-a4a4-3acc621b9ebf-client-ca\") pod \"controller-manager-899ddf568-62ppd\" (UID: \"78f9cddd-a5bb-4dcf-a4a4-3acc621b9ebf\") " pod="openshift-controller-manager/controller-manager-899ddf568-62ppd"
Dec 11 10:23:51 crc kubenswrapper[4663]: I1211 10:23:51.853004 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/78f9cddd-a5bb-4dcf-a4a4-3acc621b9ebf-serving-cert\") pod \"controller-manager-899ddf568-62ppd\" (UID: \"78f9cddd-a5bb-4dcf-a4a4-3acc621b9ebf\") " pod="openshift-controller-manager/controller-manager-899ddf568-62ppd"
Dec 11 10:23:51 crc kubenswrapper[4663]: I1211 10:23:51.853054 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/78f9cddd-a5bb-4dcf-a4a4-3acc621b9ebf-config\") pod \"controller-manager-899ddf568-62ppd\" (UID: \"78f9cddd-a5bb-4dcf-a4a4-3acc621b9ebf\") " pod="openshift-controller-manager/controller-manager-899ddf568-62ppd"
Dec 11 10:23:51 crc kubenswrapper[4663]: I1211 10:23:51.853093 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cvxb8\" (UniqueName: \"kubernetes.io/projected/78f9cddd-a5bb-4dcf-a4a4-3acc621b9ebf-kube-api-access-cvxb8\") pod \"controller-manager-899ddf568-62ppd\" (UID: \"78f9cddd-a5bb-4dcf-a4a4-3acc621b9ebf\") " pod="openshift-controller-manager/controller-manager-899ddf568-62ppd"
Dec 11 10:23:51 crc kubenswrapper[4663]: I1211 10:23:51.853147 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/78f9cddd-a5bb-4dcf-a4a4-3acc621b9ebf-proxy-ca-bundles\") pod \"controller-manager-899ddf568-62ppd\" (UID: \"78f9cddd-a5bb-4dcf-a4a4-3acc621b9ebf\") " pod="openshift-controller-manager/controller-manager-899ddf568-62ppd"
Dec 11 10:23:51 crc kubenswrapper[4663]: I1211 10:23:51.853233 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/78f9cddd-a5bb-4dcf-a4a4-3acc621b9ebf-client-ca\") pod \"controller-manager-899ddf568-62ppd\" (UID: \"78f9cddd-a5bb-4dcf-a4a4-3acc621b9ebf\") " pod="openshift-controller-manager/controller-manager-899ddf568-62ppd"
Dec 11 10:23:51 crc kubenswrapper[4663]: I1211 10:23:51.854267 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/78f9cddd-a5bb-4dcf-a4a4-3acc621b9ebf-client-ca\") pod \"controller-manager-899ddf568-62ppd\" (UID: \"78f9cddd-a5bb-4dcf-a4a4-3acc621b9ebf\") " pod="openshift-controller-manager/controller-manager-899ddf568-62ppd"
Dec 11 10:23:51 crc kubenswrapper[4663]: I1211 10:23:51.854967 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/78f9cddd-a5bb-4dcf-a4a4-3acc621b9ebf-config\") pod \"controller-manager-899ddf568-62ppd\" (UID: \"78f9cddd-a5bb-4dcf-a4a4-3acc621b9ebf\") " pod="openshift-controller-manager/controller-manager-899ddf568-62ppd"
Dec 11 10:23:51 crc kubenswrapper[4663]: I1211 10:23:51.854998 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/78f9cddd-a5bb-4dcf-a4a4-3acc621b9ebf-proxy-ca-bundles\") pod \"controller-manager-899ddf568-62ppd\" (UID: \"78f9cddd-a5bb-4dcf-a4a4-3acc621b9ebf\") " pod="openshift-controller-manager/controller-manager-899ddf568-62ppd"
Dec 11 10:23:51 crc kubenswrapper[4663]: I1211 10:23:51.859686 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/78f9cddd-a5bb-4dcf-a4a4-3acc621b9ebf-serving-cert\") pod \"controller-manager-899ddf568-62ppd\" (UID: \"78f9cddd-a5bb-4dcf-a4a4-3acc621b9ebf\") " pod="openshift-controller-manager/controller-manager-899ddf568-62ppd"
Dec 11 10:23:51 crc kubenswrapper[4663]: I1211 10:23:51.869339 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cvxb8\" (UniqueName: \"kubernetes.io/projected/78f9cddd-a5bb-4dcf-a4a4-3acc621b9ebf-kube-api-access-cvxb8\") pod \"controller-manager-899ddf568-62ppd\" (UID: \"78f9cddd-a5bb-4dcf-a4a4-3acc621b9ebf\") " pod="openshift-controller-manager/controller-manager-899ddf568-62ppd"
Dec 11 10:23:51 crc kubenswrapper[4663]: I1211 10:23:51.919476 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-899ddf568-62ppd"
Dec 11 10:23:52 crc kubenswrapper[4663]: I1211 10:23:52.299623 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-899ddf568-62ppd"]
Dec 11 10:23:52 crc kubenswrapper[4663]: I1211 10:23:52.458854 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-t8zgh"]
Dec 11 10:23:52 crc kubenswrapper[4663]: I1211 10:23:52.459712 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-t8zgh"
Dec 11 10:23:52 crc kubenswrapper[4663]: I1211 10:23:52.467091 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-t8zgh"]
Dec 11 10:23:52 crc kubenswrapper[4663]: I1211 10:23:52.561273 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9hhts\" (UniqueName: \"kubernetes.io/projected/f3ebd3e2-6a84-417d-ad50-b1e505a02783-kube-api-access-9hhts\") pod \"image-registry-66df7c8f76-t8zgh\" (UID: \"f3ebd3e2-6a84-417d-ad50-b1e505a02783\") " pod="openshift-image-registry/image-registry-66df7c8f76-t8zgh"
Dec 11 10:23:52 crc kubenswrapper[4663]: I1211 10:23:52.561317 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/f3ebd3e2-6a84-417d-ad50-b1e505a02783-registry-tls\") pod \"image-registry-66df7c8f76-t8zgh\" (UID: \"f3ebd3e2-6a84-417d-ad50-b1e505a02783\") " pod="openshift-image-registry/image-registry-66df7c8f76-t8zgh"
Dec 11 10:23:52 crc kubenswrapper[4663]: I1211 10:23:52.561334 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/f3ebd3e2-6a84-417d-ad50-b1e505a02783-installation-pull-secrets\") pod \"image-registry-66df7c8f76-t8zgh\" (UID: \"f3ebd3e2-6a84-417d-ad50-b1e505a02783\") " pod="openshift-image-registry/image-registry-66df7c8f76-t8zgh"
Dec 11 10:23:52 crc kubenswrapper[4663]: I1211 10:23:52.561362 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-t8zgh\" (UID: \"f3ebd3e2-6a84-417d-ad50-b1e505a02783\") " pod="openshift-image-registry/image-registry-66df7c8f76-t8zgh"
Dec 11 10:23:52 crc kubenswrapper[4663]: I1211 10:23:52.561539 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/f3ebd3e2-6a84-417d-ad50-b1e505a02783-registry-certificates\") pod \"image-registry-66df7c8f76-t8zgh\" (UID: \"f3ebd3e2-6a84-417d-ad50-b1e505a02783\") " pod="openshift-image-registry/image-registry-66df7c8f76-t8zgh"
Dec 11 10:23:52 crc kubenswrapper[4663]: I1211 10:23:52.561621 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f3ebd3e2-6a84-417d-ad50-b1e505a02783-trusted-ca\") pod \"image-registry-66df7c8f76-t8zgh\" (UID: \"f3ebd3e2-6a84-417d-ad50-b1e505a02783\") " pod="openshift-image-registry/image-registry-66df7c8f76-t8zgh"
Dec 11 10:23:52 crc kubenswrapper[4663]: I1211 10:23:52.561713 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f3ebd3e2-6a84-417d-ad50-b1e505a02783-bound-sa-token\") pod \"image-registry-66df7c8f76-t8zgh\" (UID: \"f3ebd3e2-6a84-417d-ad50-b1e505a02783\") " pod="openshift-image-registry/image-registry-66df7c8f76-t8zgh"
Dec 11 10:23:52 crc kubenswrapper[4663]: I1211 10:23:52.561793 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/f3ebd3e2-6a84-417d-ad50-b1e505a02783-ca-trust-extracted\") pod \"image-registry-66df7c8f76-t8zgh\" (UID: \"f3ebd3e2-6a84-417d-ad50-b1e505a02783\") " pod="openshift-image-registry/image-registry-66df7c8f76-t8zgh"
Dec 11 10:23:52 crc kubenswrapper[4663]: I1211 10:23:52.613531 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-t8zgh\" (UID: \"f3ebd3e2-6a84-417d-ad50-b1e505a02783\") " pod="openshift-image-registry/image-registry-66df7c8f76-t8zgh"
Dec 11 10:23:52 crc kubenswrapper[4663]: I1211 10:23:52.664985 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f3ebd3e2-6a84-417d-ad50-b1e505a02783-trusted-ca\") pod \"image-registry-66df7c8f76-t8zgh\" (UID: \"f3ebd3e2-6a84-417d-ad50-b1e505a02783\") " pod="openshift-image-registry/image-registry-66df7c8f76-t8zgh"
Dec 11 10:23:52 crc kubenswrapper[4663]: I1211 10:23:52.663416 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f3ebd3e2-6a84-417d-ad50-b1e505a02783-trusted-ca\") pod \"image-registry-66df7c8f76-t8zgh\" (UID: \"f3ebd3e2-6a84-417d-ad50-b1e505a02783\") " pod="openshift-image-registry/image-registry-66df7c8f76-t8zgh"
Dec 11 10:23:52 crc kubenswrapper[4663]: I1211 10:23:52.665111 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f3ebd3e2-6a84-417d-ad50-b1e505a02783-bound-sa-token\") pod \"image-registry-66df7c8f76-t8zgh\" (UID: \"f3ebd3e2-6a84-417d-ad50-b1e505a02783\") " pod="openshift-image-registry/image-registry-66df7c8f76-t8zgh"
Dec 11 10:23:52 crc kubenswrapper[4663]: I1211 10:23:52.665651 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName:
\"kubernetes.io/empty-dir/f3ebd3e2-6a84-417d-ad50-b1e505a02783-ca-trust-extracted\") pod \"image-registry-66df7c8f76-t8zgh\" (UID: \"f3ebd3e2-6a84-417d-ad50-b1e505a02783\") " pod="openshift-image-registry/image-registry-66df7c8f76-t8zgh" Dec 11 10:23:52 crc kubenswrapper[4663]: I1211 10:23:52.665833 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9hhts\" (UniqueName: \"kubernetes.io/projected/f3ebd3e2-6a84-417d-ad50-b1e505a02783-kube-api-access-9hhts\") pod \"image-registry-66df7c8f76-t8zgh\" (UID: \"f3ebd3e2-6a84-417d-ad50-b1e505a02783\") " pod="openshift-image-registry/image-registry-66df7c8f76-t8zgh" Dec 11 10:23:52 crc kubenswrapper[4663]: I1211 10:23:52.665876 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/f3ebd3e2-6a84-417d-ad50-b1e505a02783-registry-tls\") pod \"image-registry-66df7c8f76-t8zgh\" (UID: \"f3ebd3e2-6a84-417d-ad50-b1e505a02783\") " pod="openshift-image-registry/image-registry-66df7c8f76-t8zgh" Dec 11 10:23:52 crc kubenswrapper[4663]: I1211 10:23:52.665904 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/f3ebd3e2-6a84-417d-ad50-b1e505a02783-installation-pull-secrets\") pod \"image-registry-66df7c8f76-t8zgh\" (UID: \"f3ebd3e2-6a84-417d-ad50-b1e505a02783\") " pod="openshift-image-registry/image-registry-66df7c8f76-t8zgh" Dec 11 10:23:52 crc kubenswrapper[4663]: I1211 10:23:52.666001 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/f3ebd3e2-6a84-417d-ad50-b1e505a02783-registry-certificates\") pod \"image-registry-66df7c8f76-t8zgh\" (UID: \"f3ebd3e2-6a84-417d-ad50-b1e505a02783\") " pod="openshift-image-registry/image-registry-66df7c8f76-t8zgh" Dec 11 10:23:52 crc kubenswrapper[4663]: I1211 10:23:52.666744 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/f3ebd3e2-6a84-417d-ad50-b1e505a02783-ca-trust-extracted\") pod \"image-registry-66df7c8f76-t8zgh\" (UID: \"f3ebd3e2-6a84-417d-ad50-b1e505a02783\") " pod="openshift-image-registry/image-registry-66df7c8f76-t8zgh" Dec 11 10:23:52 crc kubenswrapper[4663]: I1211 10:23:52.667050 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/f3ebd3e2-6a84-417d-ad50-b1e505a02783-registry-certificates\") pod \"image-registry-66df7c8f76-t8zgh\" (UID: \"f3ebd3e2-6a84-417d-ad50-b1e505a02783\") " pod="openshift-image-registry/image-registry-66df7c8f76-t8zgh" Dec 11 10:23:52 crc kubenswrapper[4663]: I1211 10:23:52.672330 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/f3ebd3e2-6a84-417d-ad50-b1e505a02783-registry-tls\") pod \"image-registry-66df7c8f76-t8zgh\" (UID: \"f3ebd3e2-6a84-417d-ad50-b1e505a02783\") " pod="openshift-image-registry/image-registry-66df7c8f76-t8zgh" Dec 11 10:23:52 crc kubenswrapper[4663]: I1211 10:23:52.673644 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/f3ebd3e2-6a84-417d-ad50-b1e505a02783-installation-pull-secrets\") pod \"image-registry-66df7c8f76-t8zgh\" (UID: \"f3ebd3e2-6a84-417d-ad50-b1e505a02783\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-t8zgh" Dec 11 10:23:52 crc kubenswrapper[4663]: I1211 10:23:52.682965 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9hhts\" (UniqueName: \"kubernetes.io/projected/f3ebd3e2-6a84-417d-ad50-b1e505a02783-kube-api-access-9hhts\") pod \"image-registry-66df7c8f76-t8zgh\" (UID: \"f3ebd3e2-6a84-417d-ad50-b1e505a02783\") " pod="openshift-image-registry/image-registry-66df7c8f76-t8zgh" Dec 11 10:23:52 crc kubenswrapper[4663]: I1211 10:23:52.684078 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f3ebd3e2-6a84-417d-ad50-b1e505a02783-bound-sa-token\") pod \"image-registry-66df7c8f76-t8zgh\" (UID: \"f3ebd3e2-6a84-417d-ad50-b1e505a02783\") " pod="openshift-image-registry/image-registry-66df7c8f76-t8zgh" Dec 11 10:23:52 crc kubenswrapper[4663]: I1211 10:23:52.777612 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-t8zgh" Dec 11 10:23:52 crc kubenswrapper[4663]: I1211 10:23:52.866112 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-899ddf568-62ppd" event={"ID":"78f9cddd-a5bb-4dcf-a4a4-3acc621b9ebf","Type":"ContainerStarted","Data":"d23c3af325fc96a9b88331c7a61dbae6279d8ddbb5f551941ec7b485acf8b26a"} Dec 11 10:23:52 crc kubenswrapper[4663]: I1211 10:23:52.866457 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-899ddf568-62ppd" event={"ID":"78f9cddd-a5bb-4dcf-a4a4-3acc621b9ebf","Type":"ContainerStarted","Data":"944ececeedc296cab2943038bbbaa48a9d81db5f25fa71bc5f3e1bab7ea8112c"} Dec 11 10:23:52 crc kubenswrapper[4663]: I1211 10:23:52.867077 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-899ddf568-62ppd" Dec 11 10:23:52 crc kubenswrapper[4663]: I1211 10:23:52.872108 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-899ddf568-62ppd" Dec 11 10:23:52 crc kubenswrapper[4663]: I1211 10:23:52.884361 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-899ddf568-62ppd" podStartSLOduration=3.884342099 podStartE2EDuration="3.884342099s" podCreationTimestamp="2025-12-11 10:23:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:23:52.88132147 +0000 UTC m=+363.699251253" watchObservedRunningTime="2025-12-11 10:23:52.884342099 +0000 UTC m=+363.702271883" Dec 11 10:23:52 crc kubenswrapper[4663]: I1211 10:23:52.965808 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-t8zgh"] Dec 11 10:23:53 crc kubenswrapper[4663]: I1211 10:23:53.874567 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-t8zgh" event={"ID":"f3ebd3e2-6a84-417d-ad50-b1e505a02783","Type":"ContainerStarted","Data":"a3351e1e61bf778361b0e848d577515821d7a7fb3ca11720a7b1110ff7e17f65"} Dec 11 10:23:53 crc kubenswrapper[4663]: I1211 10:23:53.874623 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-t8zgh" 
event={"ID":"f3ebd3e2-6a84-417d-ad50-b1e505a02783","Type":"ContainerStarted","Data":"5c2ce7e7aa353a8cf1529e9a76bb6c2b9bd50b4b75b9fd0903ab8b71ab29def8"} Dec 11 10:23:53 crc kubenswrapper[4663]: I1211 10:23:53.891983 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-t8zgh" podStartSLOduration=1.8919671729999998 podStartE2EDuration="1.891967173s" podCreationTimestamp="2025-12-11 10:23:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:23:53.890368127 +0000 UTC m=+364.708297910" watchObservedRunningTime="2025-12-11 10:23:53.891967173 +0000 UTC m=+364.709896957" Dec 11 10:23:54 crc kubenswrapper[4663]: I1211 10:23:54.880722 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-t8zgh" Dec 11 10:24:04 crc kubenswrapper[4663]: I1211 10:24:04.414536 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-txl9v"] Dec 11 10:24:04 crc kubenswrapper[4663]: I1211 10:24:04.416096 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-txl9v" Dec 11 10:24:04 crc kubenswrapper[4663]: I1211 10:24:04.418424 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 11 10:24:04 crc kubenswrapper[4663]: I1211 10:24:04.430657 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-txl9v"] Dec 11 10:24:04 crc kubenswrapper[4663]: I1211 10:24:04.541771 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fhglj\" (UniqueName: \"kubernetes.io/projected/af7e9963-0223-494a-9b8d-494f4c643019-kube-api-access-fhglj\") pod \"redhat-marketplace-txl9v\" (UID: \"af7e9963-0223-494a-9b8d-494f4c643019\") " pod="openshift-marketplace/redhat-marketplace-txl9v" Dec 11 10:24:04 crc kubenswrapper[4663]: I1211 10:24:04.541859 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af7e9963-0223-494a-9b8d-494f4c643019-utilities\") pod \"redhat-marketplace-txl9v\" (UID: \"af7e9963-0223-494a-9b8d-494f4c643019\") " pod="openshift-marketplace/redhat-marketplace-txl9v" Dec 11 10:24:04 crc kubenswrapper[4663]: I1211 10:24:04.541897 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af7e9963-0223-494a-9b8d-494f4c643019-catalog-content\") pod \"redhat-marketplace-txl9v\" (UID: \"af7e9963-0223-494a-9b8d-494f4c643019\") " pod="openshift-marketplace/redhat-marketplace-txl9v" Dec 11 10:24:04 crc kubenswrapper[4663]: I1211 10:24:04.643388 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fhglj\" (UniqueName: \"kubernetes.io/projected/af7e9963-0223-494a-9b8d-494f4c643019-kube-api-access-fhglj\") pod \"redhat-marketplace-txl9v\" (UID: \"af7e9963-0223-494a-9b8d-494f4c643019\") " pod="openshift-marketplace/redhat-marketplace-txl9v" Dec 11 10:24:04 crc kubenswrapper[4663]: I1211 10:24:04.644669 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/af7e9963-0223-494a-9b8d-494f4c643019-utilities\") pod \"redhat-marketplace-txl9v\" (UID: \"af7e9963-0223-494a-9b8d-494f4c643019\") " pod="openshift-marketplace/redhat-marketplace-txl9v" Dec 11 10:24:04 crc kubenswrapper[4663]: I1211 10:24:04.644976 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af7e9963-0223-494a-9b8d-494f4c643019-catalog-content\") pod \"redhat-marketplace-txl9v\" (UID: \"af7e9963-0223-494a-9b8d-494f4c643019\") " pod="openshift-marketplace/redhat-marketplace-txl9v" Dec 11 10:24:04 crc kubenswrapper[4663]: I1211 10:24:04.645164 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af7e9963-0223-494a-9b8d-494f4c643019-utilities\") pod \"redhat-marketplace-txl9v\" (UID: \"af7e9963-0223-494a-9b8d-494f4c643019\") " pod="openshift-marketplace/redhat-marketplace-txl9v" Dec 11 10:24:04 crc kubenswrapper[4663]: I1211 10:24:04.645517 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af7e9963-0223-494a-9b8d-494f4c643019-catalog-content\") pod \"redhat-marketplace-txl9v\" (UID: \"af7e9963-0223-494a-9b8d-494f4c643019\") " pod="openshift-marketplace/redhat-marketplace-txl9v" Dec 11 10:24:04 crc kubenswrapper[4663]: I1211 10:24:04.661706 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fhglj\" (UniqueName: \"kubernetes.io/projected/af7e9963-0223-494a-9b8d-494f4c643019-kube-api-access-fhglj\") pod \"redhat-marketplace-txl9v\" (UID: \"af7e9963-0223-494a-9b8d-494f4c643019\") " pod="openshift-marketplace/redhat-marketplace-txl9v" Dec 11 10:24:04 crc kubenswrapper[4663]: I1211 10:24:04.742730 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-txl9v" Dec 11 10:24:05 crc kubenswrapper[4663]: I1211 10:24:05.105029 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-txl9v"] Dec 11 10:24:05 crc kubenswrapper[4663]: W1211 10:24:05.112825 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaf7e9963_0223_494a_9b8d_494f4c643019.slice/crio-28579105fb56b72bd3153e902e5ab57ed0276d42fe862da74985e62323e6aefe WatchSource:0}: Error finding container 28579105fb56b72bd3153e902e5ab57ed0276d42fe862da74985e62323e6aefe: Status 404 returned error can't find the container with id 28579105fb56b72bd3153e902e5ab57ed0276d42fe862da74985e62323e6aefe Dec 11 10:24:05 crc kubenswrapper[4663]: I1211 10:24:05.411104 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-g99nl"] Dec 11 10:24:05 crc kubenswrapper[4663]: I1211 10:24:05.412193 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-g99nl" Dec 11 10:24:05 crc kubenswrapper[4663]: I1211 10:24:05.414411 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 11 10:24:05 crc kubenswrapper[4663]: I1211 10:24:05.420488 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-g99nl"] Dec 11 10:24:05 crc kubenswrapper[4663]: I1211 10:24:05.560327 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e7a9b9ae-8043-47cc-af42-b9b5c99c6bb9-catalog-content\") pod \"redhat-operators-g99nl\" (UID: \"e7a9b9ae-8043-47cc-af42-b9b5c99c6bb9\") " pod="openshift-marketplace/redhat-operators-g99nl" Dec 11 10:24:05 crc kubenswrapper[4663]: I1211 10:24:05.560416 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e7a9b9ae-8043-47cc-af42-b9b5c99c6bb9-utilities\") pod \"redhat-operators-g99nl\" (UID: \"e7a9b9ae-8043-47cc-af42-b9b5c99c6bb9\") " pod="openshift-marketplace/redhat-operators-g99nl" Dec 11 10:24:05 crc kubenswrapper[4663]: I1211 10:24:05.560714 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kc6n5\" (UniqueName: \"kubernetes.io/projected/e7a9b9ae-8043-47cc-af42-b9b5c99c6bb9-kube-api-access-kc6n5\") pod \"redhat-operators-g99nl\" (UID: \"e7a9b9ae-8043-47cc-af42-b9b5c99c6bb9\") " pod="openshift-marketplace/redhat-operators-g99nl" Dec 11 10:24:05 crc kubenswrapper[4663]: I1211 10:24:05.662293 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e7a9b9ae-8043-47cc-af42-b9b5c99c6bb9-utilities\") pod \"redhat-operators-g99nl\" (UID: \"e7a9b9ae-8043-47cc-af42-b9b5c99c6bb9\") " pod="openshift-marketplace/redhat-operators-g99nl" Dec 11 10:24:05 crc kubenswrapper[4663]: I1211 10:24:05.662387 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kc6n5\" (UniqueName: \"kubernetes.io/projected/e7a9b9ae-8043-47cc-af42-b9b5c99c6bb9-kube-api-access-kc6n5\") pod \"redhat-operators-g99nl\" (UID: \"e7a9b9ae-8043-47cc-af42-b9b5c99c6bb9\") " pod="openshift-marketplace/redhat-operators-g99nl" Dec 11 10:24:05 crc kubenswrapper[4663]: I1211 10:24:05.662428 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e7a9b9ae-8043-47cc-af42-b9b5c99c6bb9-catalog-content\") pod \"redhat-operators-g99nl\" (UID: \"e7a9b9ae-8043-47cc-af42-b9b5c99c6bb9\") " pod="openshift-marketplace/redhat-operators-g99nl" Dec 11 10:24:05 crc kubenswrapper[4663]: I1211 10:24:05.662765 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e7a9b9ae-8043-47cc-af42-b9b5c99c6bb9-utilities\") pod \"redhat-operators-g99nl\" (UID: \"e7a9b9ae-8043-47cc-af42-b9b5c99c6bb9\") " pod="openshift-marketplace/redhat-operators-g99nl" Dec 11 10:24:05 crc kubenswrapper[4663]: I1211 10:24:05.663056 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e7a9b9ae-8043-47cc-af42-b9b5c99c6bb9-catalog-content\") pod \"redhat-operators-g99nl\" (UID: \"e7a9b9ae-8043-47cc-af42-b9b5c99c6bb9\") " 
pod="openshift-marketplace/redhat-operators-g99nl" Dec 11 10:24:05 crc kubenswrapper[4663]: I1211 10:24:05.679629 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kc6n5\" (UniqueName: \"kubernetes.io/projected/e7a9b9ae-8043-47cc-af42-b9b5c99c6bb9-kube-api-access-kc6n5\") pod \"redhat-operators-g99nl\" (UID: \"e7a9b9ae-8043-47cc-af42-b9b5c99c6bb9\") " pod="openshift-marketplace/redhat-operators-g99nl" Dec 11 10:24:05 crc kubenswrapper[4663]: I1211 10:24:05.725564 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-g99nl" Dec 11 10:24:05 crc kubenswrapper[4663]: I1211 10:24:05.947269 4663 generic.go:334] "Generic (PLEG): container finished" podID="af7e9963-0223-494a-9b8d-494f4c643019" containerID="b1132bd4cacefa5221c189e28c64bcd9f450468d76f79061264db05071f96fd2" exitCode=0 Dec 11 10:24:05 crc kubenswrapper[4663]: I1211 10:24:05.947385 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-txl9v" event={"ID":"af7e9963-0223-494a-9b8d-494f4c643019","Type":"ContainerDied","Data":"b1132bd4cacefa5221c189e28c64bcd9f450468d76f79061264db05071f96fd2"} Dec 11 10:24:05 crc kubenswrapper[4663]: I1211 10:24:05.947574 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-txl9v" event={"ID":"af7e9963-0223-494a-9b8d-494f4c643019","Type":"ContainerStarted","Data":"28579105fb56b72bd3153e902e5ab57ed0276d42fe862da74985e62323e6aefe"} Dec 11 10:24:06 crc kubenswrapper[4663]: I1211 10:24:06.099383 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-g99nl"] Dec 11 10:24:06 crc kubenswrapper[4663]: W1211 10:24:06.104094 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode7a9b9ae_8043_47cc_af42_b9b5c99c6bb9.slice/crio-51af5dd84d9b7fd6b76bdcf23379942bd895a18798a4cc1d2ee4c25830a9516a WatchSource:0}: Error finding container 51af5dd84d9b7fd6b76bdcf23379942bd895a18798a4cc1d2ee4c25830a9516a: Status 404 returned error can't find the container with id 51af5dd84d9b7fd6b76bdcf23379942bd895a18798a4cc1d2ee4c25830a9516a Dec 11 10:24:06 crc kubenswrapper[4663]: I1211 10:24:06.805788 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-z25sl"] Dec 11 10:24:06 crc kubenswrapper[4663]: I1211 10:24:06.806877 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-z25sl" Dec 11 10:24:06 crc kubenswrapper[4663]: I1211 10:24:06.808989 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 11 10:24:06 crc kubenswrapper[4663]: I1211 10:24:06.816259 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-z25sl"] Dec 11 10:24:06 crc kubenswrapper[4663]: I1211 10:24:06.953950 4663 generic.go:334] "Generic (PLEG): container finished" podID="af7e9963-0223-494a-9b8d-494f4c643019" containerID="e5e93490a6f8f82a58be5111178bba8a130bb2553c666cbaa07bfc34df60858d" exitCode=0 Dec 11 10:24:06 crc kubenswrapper[4663]: I1211 10:24:06.954050 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-txl9v" event={"ID":"af7e9963-0223-494a-9b8d-494f4c643019","Type":"ContainerDied","Data":"e5e93490a6f8f82a58be5111178bba8a130bb2553c666cbaa07bfc34df60858d"} Dec 11 10:24:06 crc kubenswrapper[4663]: I1211 10:24:06.956650 4663 generic.go:334] "Generic (PLEG): container finished" podID="e7a9b9ae-8043-47cc-af42-b9b5c99c6bb9" containerID="b41ef90e7be91ab2adf154c71aa2639ca48461578b1b03d9c5124cd45e5abae0" exitCode=0 Dec 11 10:24:06 crc kubenswrapper[4663]: I1211 10:24:06.956694 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g99nl" event={"ID":"e7a9b9ae-8043-47cc-af42-b9b5c99c6bb9","Type":"ContainerDied","Data":"b41ef90e7be91ab2adf154c71aa2639ca48461578b1b03d9c5124cd45e5abae0"} Dec 11 10:24:06 crc kubenswrapper[4663]: I1211 10:24:06.956742 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g99nl" event={"ID":"e7a9b9ae-8043-47cc-af42-b9b5c99c6bb9","Type":"ContainerStarted","Data":"51af5dd84d9b7fd6b76bdcf23379942bd895a18798a4cc1d2ee4c25830a9516a"} Dec 11 10:24:06 crc kubenswrapper[4663]: I1211 10:24:06.982949 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9d5b748-8f69-4c3d-903a-5da6078bc36a-catalog-content\") pod \"certified-operators-z25sl\" (UID: \"d9d5b748-8f69-4c3d-903a-5da6078bc36a\") " pod="openshift-marketplace/certified-operators-z25sl" Dec 11 10:24:06 crc kubenswrapper[4663]: I1211 10:24:06.983037 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ktcvc\" (UniqueName: \"kubernetes.io/projected/d9d5b748-8f69-4c3d-903a-5da6078bc36a-kube-api-access-ktcvc\") pod \"certified-operators-z25sl\" (UID: \"d9d5b748-8f69-4c3d-903a-5da6078bc36a\") " pod="openshift-marketplace/certified-operators-z25sl" Dec 11 10:24:06 crc kubenswrapper[4663]: I1211 10:24:06.983171 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9d5b748-8f69-4c3d-903a-5da6078bc36a-utilities\") pod \"certified-operators-z25sl\" (UID: \"d9d5b748-8f69-4c3d-903a-5da6078bc36a\") " pod="openshift-marketplace/certified-operators-z25sl" Dec 11 10:24:07 crc kubenswrapper[4663]: I1211 10:24:07.084474 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9d5b748-8f69-4c3d-903a-5da6078bc36a-catalog-content\") pod \"certified-operators-z25sl\" (UID: \"d9d5b748-8f69-4c3d-903a-5da6078bc36a\") " 
pod="openshift-marketplace/certified-operators-z25sl" Dec 11 10:24:07 crc kubenswrapper[4663]: I1211 10:24:07.084566 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ktcvc\" (UniqueName: \"kubernetes.io/projected/d9d5b748-8f69-4c3d-903a-5da6078bc36a-kube-api-access-ktcvc\") pod \"certified-operators-z25sl\" (UID: \"d9d5b748-8f69-4c3d-903a-5da6078bc36a\") " pod="openshift-marketplace/certified-operators-z25sl" Dec 11 10:24:07 crc kubenswrapper[4663]: I1211 10:24:07.084605 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9d5b748-8f69-4c3d-903a-5da6078bc36a-utilities\") pod \"certified-operators-z25sl\" (UID: \"d9d5b748-8f69-4c3d-903a-5da6078bc36a\") " pod="openshift-marketplace/certified-operators-z25sl" Dec 11 10:24:07 crc kubenswrapper[4663]: I1211 10:24:07.084977 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9d5b748-8f69-4c3d-903a-5da6078bc36a-catalog-content\") pod \"certified-operators-z25sl\" (UID: \"d9d5b748-8f69-4c3d-903a-5da6078bc36a\") " pod="openshift-marketplace/certified-operators-z25sl" Dec 11 10:24:07 crc kubenswrapper[4663]: I1211 10:24:07.085010 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9d5b748-8f69-4c3d-903a-5da6078bc36a-utilities\") pod \"certified-operators-z25sl\" (UID: \"d9d5b748-8f69-4c3d-903a-5da6078bc36a\") " pod="openshift-marketplace/certified-operators-z25sl" Dec 11 10:24:07 crc kubenswrapper[4663]: I1211 10:24:07.102007 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ktcvc\" (UniqueName: \"kubernetes.io/projected/d9d5b748-8f69-4c3d-903a-5da6078bc36a-kube-api-access-ktcvc\") pod \"certified-operators-z25sl\" (UID: \"d9d5b748-8f69-4c3d-903a-5da6078bc36a\") " pod="openshift-marketplace/certified-operators-z25sl" Dec 11 10:24:07 crc kubenswrapper[4663]: I1211 10:24:07.120848 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-z25sl" Dec 11 10:24:07 crc kubenswrapper[4663]: I1211 10:24:07.480309 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-z25sl"] Dec 11 10:24:07 crc kubenswrapper[4663]: I1211 10:24:07.808585 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-qr54z"] Dec 11 10:24:07 crc kubenswrapper[4663]: I1211 10:24:07.811105 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-qr54z" Dec 11 10:24:07 crc kubenswrapper[4663]: I1211 10:24:07.813337 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 11 10:24:07 crc kubenswrapper[4663]: I1211 10:24:07.818500 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qr54z"] Dec 11 10:24:07 crc kubenswrapper[4663]: I1211 10:24:07.963880 4663 generic.go:334] "Generic (PLEG): container finished" podID="d9d5b748-8f69-4c3d-903a-5da6078bc36a" containerID="edb4fd418ecf478ef94ce012a3fee3e93ce47650bb16efefe5a3364dabce4508" exitCode=0 Dec 11 10:24:07 crc kubenswrapper[4663]: I1211 10:24:07.963988 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z25sl" event={"ID":"d9d5b748-8f69-4c3d-903a-5da6078bc36a","Type":"ContainerDied","Data":"edb4fd418ecf478ef94ce012a3fee3e93ce47650bb16efefe5a3364dabce4508"} Dec 11 10:24:07 crc kubenswrapper[4663]: I1211 10:24:07.964045 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z25sl" event={"ID":"d9d5b748-8f69-4c3d-903a-5da6078bc36a","Type":"ContainerStarted","Data":"c272f4f0d3d0d9f2aad712bd3783c4337e003f3ccd04cb0016448197ae43eb7d"} Dec 11 10:24:07 crc kubenswrapper[4663]: I1211 10:24:07.966480 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-txl9v" event={"ID":"af7e9963-0223-494a-9b8d-494f4c643019","Type":"ContainerStarted","Data":"9661421866be5fd9097ceee4f5bd8c64fe486d606dc2a49d69735b1b84d99f14"} Dec 11 10:24:07 crc kubenswrapper[4663]: I1211 10:24:07.993324 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-txl9v" podStartSLOduration=2.374324963 podStartE2EDuration="3.99330295s" podCreationTimestamp="2025-12-11 10:24:04 +0000 UTC" firstStartedPulling="2025-12-11 10:24:05.948926896 +0000 UTC m=+376.766856680" lastFinishedPulling="2025-12-11 10:24:07.567904883 +0000 UTC m=+378.385834667" observedRunningTime="2025-12-11 10:24:07.992979039 +0000 UTC m=+378.810908852" watchObservedRunningTime="2025-12-11 10:24:07.99330295 +0000 UTC m=+378.811232734" Dec 11 10:24:07 crc kubenswrapper[4663]: I1211 10:24:07.997902 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9fdfd84e-f9c5-4956-b80e-f5c2eae0c5eb-catalog-content\") pod \"community-operators-qr54z\" (UID: \"9fdfd84e-f9c5-4956-b80e-f5c2eae0c5eb\") " pod="openshift-marketplace/community-operators-qr54z" Dec 11 10:24:07 crc kubenswrapper[4663]: I1211 10:24:07.998070 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bz4hv\" (UniqueName: \"kubernetes.io/projected/9fdfd84e-f9c5-4956-b80e-f5c2eae0c5eb-kube-api-access-bz4hv\") pod \"community-operators-qr54z\" (UID: \"9fdfd84e-f9c5-4956-b80e-f5c2eae0c5eb\") " pod="openshift-marketplace/community-operators-qr54z" Dec 11 10:24:07 crc kubenswrapper[4663]: I1211 10:24:07.998121 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9fdfd84e-f9c5-4956-b80e-f5c2eae0c5eb-utilities\") pod \"community-operators-qr54z\" (UID: \"9fdfd84e-f9c5-4956-b80e-f5c2eae0c5eb\") " pod="openshift-marketplace/community-operators-qr54z" Dec 11 
Dec 11 10:24:08 crc kubenswrapper[4663]: I1211 10:24:08.099952 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9fdfd84e-f9c5-4956-b80e-f5c2eae0c5eb-catalog-content\") pod \"community-operators-qr54z\" (UID: \"9fdfd84e-f9c5-4956-b80e-f5c2eae0c5eb\") " pod="openshift-marketplace/community-operators-qr54z"
Dec 11 10:24:08 crc kubenswrapper[4663]: I1211 10:24:08.100093 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bz4hv\" (UniqueName: \"kubernetes.io/projected/9fdfd84e-f9c5-4956-b80e-f5c2eae0c5eb-kube-api-access-bz4hv\") pod \"community-operators-qr54z\" (UID: \"9fdfd84e-f9c5-4956-b80e-f5c2eae0c5eb\") " pod="openshift-marketplace/community-operators-qr54z"
Dec 11 10:24:08 crc kubenswrapper[4663]: I1211 10:24:08.100119 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9fdfd84e-f9c5-4956-b80e-f5c2eae0c5eb-utilities\") pod \"community-operators-qr54z\" (UID: \"9fdfd84e-f9c5-4956-b80e-f5c2eae0c5eb\") " pod="openshift-marketplace/community-operators-qr54z"
Dec 11 10:24:08 crc kubenswrapper[4663]: I1211 10:24:08.100795 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9fdfd84e-f9c5-4956-b80e-f5c2eae0c5eb-catalog-content\") pod \"community-operators-qr54z\" (UID: \"9fdfd84e-f9c5-4956-b80e-f5c2eae0c5eb\") " pod="openshift-marketplace/community-operators-qr54z"
Dec 11 10:24:08 crc kubenswrapper[4663]: I1211 10:24:08.100819 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9fdfd84e-f9c5-4956-b80e-f5c2eae0c5eb-utilities\") pod \"community-operators-qr54z\" (UID: \"9fdfd84e-f9c5-4956-b80e-f5c2eae0c5eb\") " pod="openshift-marketplace/community-operators-qr54z"
Dec 11 10:24:08 crc kubenswrapper[4663]: I1211 10:24:08.117643 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bz4hv\" (UniqueName: \"kubernetes.io/projected/9fdfd84e-f9c5-4956-b80e-f5c2eae0c5eb-kube-api-access-bz4hv\") pod \"community-operators-qr54z\" (UID: \"9fdfd84e-f9c5-4956-b80e-f5c2eae0c5eb\") " pod="openshift-marketplace/community-operators-qr54z"
Dec 11 10:24:08 crc kubenswrapper[4663]: I1211 10:24:08.125226 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qr54z"
Dec 11 10:24:08 crc kubenswrapper[4663]: I1211 10:24:08.491283 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qr54z"]
Dec 11 10:24:08 crc kubenswrapper[4663]: I1211 10:24:08.971333 4663 generic.go:334] "Generic (PLEG): container finished" podID="9fdfd84e-f9c5-4956-b80e-f5c2eae0c5eb" containerID="b3c68818cb48a16c48ccd487552461699c1abe8a9d30b5c2c713e632e7728c5c" exitCode=0
Dec 11 10:24:08 crc kubenswrapper[4663]: I1211 10:24:08.971377 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qr54z" event={"ID":"9fdfd84e-f9c5-4956-b80e-f5c2eae0c5eb","Type":"ContainerDied","Data":"b3c68818cb48a16c48ccd487552461699c1abe8a9d30b5c2c713e632e7728c5c"}
Dec 11 10:24:08 crc kubenswrapper[4663]: I1211 10:24:08.971695 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qr54z" event={"ID":"9fdfd84e-f9c5-4956-b80e-f5c2eae0c5eb","Type":"ContainerStarted","Data":"39843df0640894f1ca339ab297d6964c5f4b27c53268cf5694bf8b029b990c61"}
Dec 11 10:24:08 crc kubenswrapper[4663]: I1211 10:24:08.973836 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z25sl" event={"ID":"d9d5b748-8f69-4c3d-903a-5da6078bc36a","Type":"ContainerStarted","Data":"b6557aa462a4e36bc356e01c291ecf181751727be26842def443cbc532d87ae4"}
Dec 11 10:24:08 crc kubenswrapper[4663]: I1211 10:24:08.975700 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g99nl" event={"ID":"e7a9b9ae-8043-47cc-af42-b9b5c99c6bb9","Type":"ContainerStarted","Data":"51470cb067e17b62cf76a560060de406164e50070662b3e1830d2eefb1ebfe7b"}
Dec 11 10:24:09 crc kubenswrapper[4663]: I1211 10:24:09.982992 4663 generic.go:334] "Generic (PLEG): container finished" podID="e7a9b9ae-8043-47cc-af42-b9b5c99c6bb9" containerID="51470cb067e17b62cf76a560060de406164e50070662b3e1830d2eefb1ebfe7b" exitCode=0
Dec 11 10:24:09 crc kubenswrapper[4663]: I1211 10:24:09.983107 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g99nl" event={"ID":"e7a9b9ae-8043-47cc-af42-b9b5c99c6bb9","Type":"ContainerDied","Data":"51470cb067e17b62cf76a560060de406164e50070662b3e1830d2eefb1ebfe7b"}
Dec 11 10:24:09 crc kubenswrapper[4663]: I1211 10:24:09.985139 4663 generic.go:334] "Generic (PLEG): container finished" podID="d9d5b748-8f69-4c3d-903a-5da6078bc36a" containerID="b6557aa462a4e36bc356e01c291ecf181751727be26842def443cbc532d87ae4" exitCode=0
Dec 11 10:24:09 crc kubenswrapper[4663]: I1211 10:24:09.985207 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z25sl" event={"ID":"d9d5b748-8f69-4c3d-903a-5da6078bc36a","Type":"ContainerDied","Data":"b6557aa462a4e36bc356e01c291ecf181751727be26842def443cbc532d87ae4"}
Dec 11 10:24:10 crc kubenswrapper[4663]: I1211 10:24:10.997724 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z25sl" event={"ID":"d9d5b748-8f69-4c3d-903a-5da6078bc36a","Type":"ContainerStarted","Data":"4a048ae59f41e1349dd93126a6068d2dd82497dec87b1e39a33e931187a885cb"}
Dec 11 10:24:10 crc kubenswrapper[4663]: I1211 10:24:10.999727 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g99nl" event={"ID":"e7a9b9ae-8043-47cc-af42-b9b5c99c6bb9","Type":"ContainerStarted","Data":"f40f989c59f60722eeb9f18a9591c8a1f5ce84ab4ccb22d12fcd67d8431cae5b"}
Dec 11 10:24:11 crc kubenswrapper[4663]: I1211 10:24:11.002199 4663 generic.go:334] "Generic (PLEG): container finished" podID="9fdfd84e-f9c5-4956-b80e-f5c2eae0c5eb" containerID="ae20757926b0a732e20f7c7b659cc6afb6ffa9d47aa0a7a7444ae8a76046f7c3" exitCode=0
Dec 11 10:24:11 crc kubenswrapper[4663]: I1211 10:24:11.002263 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qr54z" event={"ID":"9fdfd84e-f9c5-4956-b80e-f5c2eae0c5eb","Type":"ContainerDied","Data":"ae20757926b0a732e20f7c7b659cc6afb6ffa9d47aa0a7a7444ae8a76046f7c3"}
Dec 11 10:24:11 crc kubenswrapper[4663]: I1211 10:24:11.017735 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-z25sl" podStartSLOduration=2.506080352 podStartE2EDuration="5.017718049s" podCreationTimestamp="2025-12-11 10:24:06 +0000 UTC" firstStartedPulling="2025-12-11 10:24:07.965415291 +0000 UTC m=+378.783345075" lastFinishedPulling="2025-12-11 10:24:10.477052988 +0000 UTC m=+381.294982772" observedRunningTime="2025-12-11 10:24:11.013520939 +0000 UTC m=+381.831450723" watchObservedRunningTime="2025-12-11 10:24:11.017718049 +0000 UTC m=+381.835647833"
Dec 11 10:24:11 crc kubenswrapper[4663]: I1211 10:24:11.032107 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-g99nl" podStartSLOduration=2.346400822 podStartE2EDuration="6.032085267s" podCreationTimestamp="2025-12-11 10:24:05 +0000 UTC" firstStartedPulling="2025-12-11 10:24:06.958621626 +0000 UTC m=+377.776551409" lastFinishedPulling="2025-12-11 10:24:10.64430607 +0000 UTC m=+381.462235854" observedRunningTime="2025-12-11 10:24:11.029361136 +0000 UTC m=+381.847290921" watchObservedRunningTime="2025-12-11 10:24:11.032085267 +0000 UTC m=+381.850015050"
Dec 11 10:24:12 crc kubenswrapper[4663]: I1211 10:24:12.009362 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qr54z" event={"ID":"9fdfd84e-f9c5-4956-b80e-f5c2eae0c5eb","Type":"ContainerStarted","Data":"d46ade2f6c1b274691546c69a54ff129cb2d3ea7d683cc1e716dffc004dc5a06"}
Dec 11 10:24:12 crc kubenswrapper[4663]: I1211 10:24:12.024669 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-qr54z" podStartSLOduration=2.458607529 podStartE2EDuration="5.024644886s" podCreationTimestamp="2025-12-11 10:24:07 +0000 UTC" firstStartedPulling="2025-12-11 10:24:08.972596057 +0000 UTC m=+379.790525842" lastFinishedPulling="2025-12-11 10:24:11.538633415 +0000 UTC m=+382.356563199" observedRunningTime="2025-12-11 10:24:12.022690699 +0000 UTC m=+382.840620483" watchObservedRunningTime="2025-12-11 10:24:12.024644886 +0000 UTC m=+382.842574670"
Dec 11 10:24:12 crc kubenswrapper[4663]: I1211 10:24:12.786816 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-t8zgh"
Dec 11 10:24:12 crc kubenswrapper[4663]: I1211 10:24:12.832381 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-mxhbc"]
Dec 11 10:24:14 crc kubenswrapper[4663]: I1211 10:24:14.743107 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-txl9v"
Dec 11 10:24:14 crc kubenswrapper[4663]: I1211 10:24:14.743675 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-txl9v"
Dec 11 10:24:14 crc kubenswrapper[4663]: I1211 10:24:14.781840 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-txl9v"
Dec 11 10:24:15 crc kubenswrapper[4663]: I1211 10:24:15.054508 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-txl9v"
Dec 11 10:24:15 crc kubenswrapper[4663]: I1211 10:24:15.726547 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-g99nl"
Dec 11 10:24:15 crc kubenswrapper[4663]: I1211 10:24:15.727200 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-g99nl"
Dec 11 10:24:15 crc kubenswrapper[4663]: I1211 10:24:15.762548 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-g99nl"
Dec 11 10:24:16 crc kubenswrapper[4663]: I1211 10:24:16.065713 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-g99nl"
Dec 11 10:24:17 crc kubenswrapper[4663]: I1211 10:24:17.121154 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-z25sl"
Dec 11 10:24:17 crc kubenswrapper[4663]: I1211 10:24:17.121537 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-z25sl"
Dec 11 10:24:17 crc kubenswrapper[4663]: I1211 10:24:17.150620 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-z25sl"
Dec 11 10:24:18 crc kubenswrapper[4663]: I1211 10:24:18.073847 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-z25sl"
Dec 11 10:24:18 crc kubenswrapper[4663]: I1211 10:24:18.125520 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-qr54z"
Dec 11 10:24:18 crc kubenswrapper[4663]: I1211 10:24:18.125557 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-qr54z"
Dec 11 10:24:18 crc kubenswrapper[4663]: I1211 10:24:18.153399 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-qr54z"
Dec 11 10:24:19 crc kubenswrapper[4663]: I1211 10:24:19.085518 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-qr54z"
Dec 11 10:24:19 crc kubenswrapper[4663]: I1211 10:24:19.271129 4663 patch_prober.go:28] interesting pod/machine-config-daemon-mq524 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 11 10:24:19 crc kubenswrapper[4663]: I1211 10:24:19.271979 4663 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 11 10:24:19 crc kubenswrapper[4663]: I1211 10:24:19.272225 4663 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mq524"
Dec 11 10:24:19 crc kubenswrapper[4663]: I1211 10:24:19.273513 4663 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"894133a128575857e18fd9f49a2fe8c20c1e724cdefaa6dd3b25402ad2d9e35b"} pod="openshift-machine-config-operator/machine-config-daemon-mq524" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 11 10:24:19 crc kubenswrapper[4663]: I1211 10:24:19.273688 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13" containerName="machine-config-daemon" containerID="cri-o://894133a128575857e18fd9f49a2fe8c20c1e724cdefaa6dd3b25402ad2d9e35b" gracePeriod=600
Dec 11 10:24:20 crc kubenswrapper[4663]: I1211 10:24:20.057262 4663 generic.go:334] "Generic (PLEG): container finished" podID="98941020-2764-490d-81db-9decd21aaa13" containerID="894133a128575857e18fd9f49a2fe8c20c1e724cdefaa6dd3b25402ad2d9e35b" exitCode=0
Dec 11 10:24:20 crc kubenswrapper[4663]: I1211 10:24:20.057345 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" event={"ID":"98941020-2764-490d-81db-9decd21aaa13","Type":"ContainerDied","Data":"894133a128575857e18fd9f49a2fe8c20c1e724cdefaa6dd3b25402ad2d9e35b"}
Dec 11 10:24:20 crc kubenswrapper[4663]: I1211 10:24:20.057597 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" event={"ID":"98941020-2764-490d-81db-9decd21aaa13","Type":"ContainerStarted","Data":"71596b251b77ae3f29e9862f15f8d7073ec4222881d08d3a5d12d46cc1d6d5ef"}
Dec 11 10:24:20 crc kubenswrapper[4663]: I1211 10:24:20.057623 4663 scope.go:117] "RemoveContainer" containerID="0ed2238a7f3178a2c325e0f77a94934af6c907c13d0340e7f4c67b744669b56b"
Dec 11 10:24:37 crc kubenswrapper[4663]: I1211 10:24:37.869028 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" podUID="62062293-e497-4499-b728-226260dc0ae1" containerName="registry" containerID="cri-o://c91f81e461b2d76472d0aa7b28b5281c66994943f75f682906c7e27a750cb196" gracePeriod=30
Dec 11 10:24:38 crc kubenswrapper[4663]: I1211 10:24:38.179319 4663 generic.go:334] "Generic (PLEG): container finished" podID="62062293-e497-4499-b728-226260dc0ae1" containerID="c91f81e461b2d76472d0aa7b28b5281c66994943f75f682906c7e27a750cb196" exitCode=0
Dec 11 10:24:38 crc kubenswrapper[4663]: I1211 10:24:38.179361 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" event={"ID":"62062293-e497-4499-b728-226260dc0ae1","Type":"ContainerDied","Data":"c91f81e461b2d76472d0aa7b28b5281c66994943f75f682906c7e27a750cb196"}
Dec 11 10:24:38 crc kubenswrapper[4663]: I1211 10:24:38.239348 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc"
Dec 11 10:24:38 crc kubenswrapper[4663]: I1211 10:24:38.271314 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kzng4\" (UniqueName: \"kubernetes.io/projected/62062293-e497-4499-b728-226260dc0ae1-kube-api-access-kzng4\") pod \"62062293-e497-4499-b728-226260dc0ae1\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") "
Dec 11 10:24:38 crc kubenswrapper[4663]: I1211 10:24:38.271426 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/62062293-e497-4499-b728-226260dc0ae1-bound-sa-token\") pod \"62062293-e497-4499-b728-226260dc0ae1\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") "
Dec 11 10:24:38 crc kubenswrapper[4663]: I1211 10:24:38.271669 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"62062293-e497-4499-b728-226260dc0ae1\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") "
Dec 11 10:24:38 crc kubenswrapper[4663]: I1211 10:24:38.271707 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/62062293-e497-4499-b728-226260dc0ae1-installation-pull-secrets\") pod \"62062293-e497-4499-b728-226260dc0ae1\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") "
Dec 11 10:24:38 crc kubenswrapper[4663]: I1211 10:24:38.271742 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/62062293-e497-4499-b728-226260dc0ae1-trusted-ca\") pod \"62062293-e497-4499-b728-226260dc0ae1\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") "
Dec 11 10:24:38 crc kubenswrapper[4663]: I1211 10:24:38.271774 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/62062293-e497-4499-b728-226260dc0ae1-ca-trust-extracted\") pod \"62062293-e497-4499-b728-226260dc0ae1\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") "
Dec 11 10:24:38 crc kubenswrapper[4663]: I1211 10:24:38.271791 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/62062293-e497-4499-b728-226260dc0ae1-registry-certificates\") pod \"62062293-e497-4499-b728-226260dc0ae1\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") "
Dec 11 10:24:38 crc kubenswrapper[4663]: I1211 10:24:38.271815 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/62062293-e497-4499-b728-226260dc0ae1-registry-tls\") pod \"62062293-e497-4499-b728-226260dc0ae1\" (UID: \"62062293-e497-4499-b728-226260dc0ae1\") "
Dec 11 10:24:38 crc kubenswrapper[4663]: I1211 10:24:38.273099 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/62062293-e497-4499-b728-226260dc0ae1-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "62062293-e497-4499-b728-226260dc0ae1" (UID: "62062293-e497-4499-b728-226260dc0ae1"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:24:38 crc kubenswrapper[4663]: I1211 10:24:38.273163 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/62062293-e497-4499-b728-226260dc0ae1-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "62062293-e497-4499-b728-226260dc0ae1" (UID: "62062293-e497-4499-b728-226260dc0ae1"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:24:38 crc kubenswrapper[4663]: I1211 10:24:38.277302 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/62062293-e497-4499-b728-226260dc0ae1-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "62062293-e497-4499-b728-226260dc0ae1" (UID: "62062293-e497-4499-b728-226260dc0ae1"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:24:38 crc kubenswrapper[4663]: I1211 10:24:38.277591 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/62062293-e497-4499-b728-226260dc0ae1-kube-api-access-kzng4" (OuterVolumeSpecName: "kube-api-access-kzng4") pod "62062293-e497-4499-b728-226260dc0ae1" (UID: "62062293-e497-4499-b728-226260dc0ae1"). InnerVolumeSpecName "kube-api-access-kzng4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:24:38 crc kubenswrapper[4663]: I1211 10:24:38.277906 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62062293-e497-4499-b728-226260dc0ae1-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "62062293-e497-4499-b728-226260dc0ae1" (UID: "62062293-e497-4499-b728-226260dc0ae1"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:24:38 crc kubenswrapper[4663]: I1211 10:24:38.278680 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/62062293-e497-4499-b728-226260dc0ae1-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "62062293-e497-4499-b728-226260dc0ae1" (UID: "62062293-e497-4499-b728-226260dc0ae1"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:24:38 crc kubenswrapper[4663]: I1211 10:24:38.283463 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "62062293-e497-4499-b728-226260dc0ae1" (UID: "62062293-e497-4499-b728-226260dc0ae1"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 11 10:24:38 crc kubenswrapper[4663]: I1211 10:24:38.290462 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/62062293-e497-4499-b728-226260dc0ae1-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "62062293-e497-4499-b728-226260dc0ae1" (UID: "62062293-e497-4499-b728-226260dc0ae1"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:24:38 crc kubenswrapper[4663]: I1211 10:24:38.374204 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kzng4\" (UniqueName: \"kubernetes.io/projected/62062293-e497-4499-b728-226260dc0ae1-kube-api-access-kzng4\") on node \"crc\" DevicePath \"\"" Dec 11 10:24:38 crc kubenswrapper[4663]: I1211 10:24:38.374261 4663 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/62062293-e497-4499-b728-226260dc0ae1-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 11 10:24:38 crc kubenswrapper[4663]: I1211 10:24:38.374274 4663 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/62062293-e497-4499-b728-226260dc0ae1-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 11 10:24:38 crc kubenswrapper[4663]: I1211 10:24:38.374288 4663 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/62062293-e497-4499-b728-226260dc0ae1-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 11 10:24:38 crc kubenswrapper[4663]: I1211 10:24:38.374301 4663 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/62062293-e497-4499-b728-226260dc0ae1-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 11 10:24:38 crc kubenswrapper[4663]: I1211 10:24:38.374310 4663 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/62062293-e497-4499-b728-226260dc0ae1-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 11 10:24:38 crc kubenswrapper[4663]: I1211 10:24:38.374320 4663 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/62062293-e497-4499-b728-226260dc0ae1-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 11 10:24:39 crc kubenswrapper[4663]: I1211 10:24:39.187451 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" event={"ID":"62062293-e497-4499-b728-226260dc0ae1","Type":"ContainerDied","Data":"a0cff407b9879f0f99547db5d34349d1bc163b9c806b95dc8aa4ac2f5e0510cf"} Dec 11 10:24:39 crc kubenswrapper[4663]: I1211 10:24:39.187498 4663 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-mxhbc" Dec 11 10:24:39 crc kubenswrapper[4663]: I1211 10:24:39.188257 4663 scope.go:117] "RemoveContainer" containerID="c91f81e461b2d76472d0aa7b28b5281c66994943f75f682906c7e27a750cb196" Dec 11 10:24:39 crc kubenswrapper[4663]: I1211 10:24:39.215306 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-mxhbc"] Dec 11 10:24:39 crc kubenswrapper[4663]: I1211 10:24:39.217578 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-mxhbc"] Dec 11 10:24:39 crc kubenswrapper[4663]: I1211 10:24:39.511408 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="62062293-e497-4499-b728-226260dc0ae1" path="/var/lib/kubelet/pods/62062293-e497-4499-b728-226260dc0ae1/volumes" Dec 11 10:26:49 crc kubenswrapper[4663]: I1211 10:26:49.271710 4663 patch_prober.go:28] interesting pod/machine-config-daemon-mq524 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 10:26:49 crc kubenswrapper[4663]: I1211 10:26:49.272149 4663 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 10:27:19 crc kubenswrapper[4663]: I1211 10:27:19.271513 4663 patch_prober.go:28] interesting pod/machine-config-daemon-mq524 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 10:27:19 crc kubenswrapper[4663]: I1211 10:27:19.272641 4663 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 10:27:49 crc kubenswrapper[4663]: I1211 10:27:49.271158 4663 patch_prober.go:28] interesting pod/machine-config-daemon-mq524 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 10:27:49 crc kubenswrapper[4663]: I1211 10:27:49.271751 4663 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 10:27:49 crc kubenswrapper[4663]: I1211 10:27:49.271790 4663 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mq524" Dec 11 10:27:49 crc kubenswrapper[4663]: I1211 10:27:49.272142 4663 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
Dec 11 10:27:49 crc kubenswrapper[4663]: I1211 10:27:49.272142 4663 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"71596b251b77ae3f29e9862f15f8d7073ec4222881d08d3a5d12d46cc1d6d5ef"} pod="openshift-machine-config-operator/machine-config-daemon-mq524" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 11 10:27:49 crc kubenswrapper[4663]: I1211 10:27:49.272203 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13" containerName="machine-config-daemon" containerID="cri-o://71596b251b77ae3f29e9862f15f8d7073ec4222881d08d3a5d12d46cc1d6d5ef" gracePeriod=600
Dec 11 10:27:49 crc kubenswrapper[4663]: I1211 10:27:49.996991 4663 generic.go:334] "Generic (PLEG): container finished" podID="98941020-2764-490d-81db-9decd21aaa13" containerID="71596b251b77ae3f29e9862f15f8d7073ec4222881d08d3a5d12d46cc1d6d5ef" exitCode=0
Dec 11 10:27:49 crc kubenswrapper[4663]: I1211 10:27:49.997065 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" event={"ID":"98941020-2764-490d-81db-9decd21aaa13","Type":"ContainerDied","Data":"71596b251b77ae3f29e9862f15f8d7073ec4222881d08d3a5d12d46cc1d6d5ef"}
Dec 11 10:27:49 crc kubenswrapper[4663]: I1211 10:27:49.997291 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" event={"ID":"98941020-2764-490d-81db-9decd21aaa13","Type":"ContainerStarted","Data":"d6a383f638ba877808ee9ae4a0f108f5e0f431193b4b9388090fdfc37a56b27f"}
Dec 11 10:27:49 crc kubenswrapper[4663]: I1211 10:27:49.997312 4663 scope.go:117] "RemoveContainer" containerID="894133a128575857e18fd9f49a2fe8c20c1e724cdefaa6dd3b25402ad2d9e35b"
Dec 11 10:28:10 crc kubenswrapper[4663]: I1211 10:28:10.990371 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-2fdfp"]
Dec 11 10:28:10 crc kubenswrapper[4663]: E1211 10:28:10.990996 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62062293-e497-4499-b728-226260dc0ae1" containerName="registry"
Dec 11 10:28:10 crc kubenswrapper[4663]: I1211 10:28:10.991009 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="62062293-e497-4499-b728-226260dc0ae1" containerName="registry"
Dec 11 10:28:10 crc kubenswrapper[4663]: I1211 10:28:10.991088 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="62062293-e497-4499-b728-226260dc0ae1" containerName="registry"
Dec 11 10:28:10 crc kubenswrapper[4663]: I1211 10:28:10.991445 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-2fdfp"
Dec 11 10:28:10 crc kubenswrapper[4663]: I1211 10:28:10.993467 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-zmshh"]
Dec 11 10:28:10 crc kubenswrapper[4663]: I1211 10:28:10.999284 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt"
Dec 11 10:28:10 crc kubenswrapper[4663]: I1211 10:28:10.999812 4663 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-8kpqv"
Dec 11 10:28:11 crc kubenswrapper[4663]: I1211 10:28:11.000330 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt"
Dec 11 10:28:11 crc kubenswrapper[4663]: I1211 10:28:11.008426 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-zmshh"]
Dec 11 10:28:11 crc kubenswrapper[4663]: I1211 10:28:11.008579 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-zmshh"
Dec 11 10:28:11 crc kubenswrapper[4663]: I1211 10:28:11.011813 4663 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-d6ncb"
Dec 11 10:28:11 crc kubenswrapper[4663]: I1211 10:28:11.017496 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-2fdfp"]
Dec 11 10:28:11 crc kubenswrapper[4663]: I1211 10:28:11.028539 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-m84qd"]
Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-m84qd" Dec 11 10:28:11 crc kubenswrapper[4663]: I1211 10:28:11.030902 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-m84qd"] Dec 11 10:28:11 crc kubenswrapper[4663]: I1211 10:28:11.032202 4663 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-82nps" Dec 11 10:28:11 crc kubenswrapper[4663]: I1211 10:28:11.092036 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t2dvg\" (UniqueName: \"kubernetes.io/projected/c4c3d255-ffd5-4eab-99b7-d62d9b664a16-kube-api-access-t2dvg\") pod \"cert-manager-5b446d88c5-2fdfp\" (UID: \"c4c3d255-ffd5-4eab-99b7-d62d9b664a16\") " pod="cert-manager/cert-manager-5b446d88c5-2fdfp" Dec 11 10:28:11 crc kubenswrapper[4663]: I1211 10:28:11.092084 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q6c99\" (UniqueName: \"kubernetes.io/projected/93fa4443-e685-41c3-9801-d2a37325674a-kube-api-access-q6c99\") pod \"cert-manager-webhook-5655c58dd6-m84qd\" (UID: \"93fa4443-e685-41c3-9801-d2a37325674a\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-m84qd" Dec 11 10:28:11 crc kubenswrapper[4663]: I1211 10:28:11.092259 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j7mtd\" (UniqueName: \"kubernetes.io/projected/2f9a5b46-b79d-496d-9a78-132d85f4f115-kube-api-access-j7mtd\") pod \"cert-manager-cainjector-7f985d654d-zmshh\" (UID: \"2f9a5b46-b79d-496d-9a78-132d85f4f115\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-zmshh" Dec 11 10:28:11 crc kubenswrapper[4663]: I1211 10:28:11.193866 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t2dvg\" (UniqueName: \"kubernetes.io/projected/c4c3d255-ffd5-4eab-99b7-d62d9b664a16-kube-api-access-t2dvg\") pod \"cert-manager-5b446d88c5-2fdfp\" (UID: \"c4c3d255-ffd5-4eab-99b7-d62d9b664a16\") " pod="cert-manager/cert-manager-5b446d88c5-2fdfp" Dec 11 10:28:11 crc kubenswrapper[4663]: I1211 10:28:11.193927 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q6c99\" (UniqueName: \"kubernetes.io/projected/93fa4443-e685-41c3-9801-d2a37325674a-kube-api-access-q6c99\") pod \"cert-manager-webhook-5655c58dd6-m84qd\" (UID: \"93fa4443-e685-41c3-9801-d2a37325674a\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-m84qd" Dec 11 10:28:11 crc kubenswrapper[4663]: I1211 10:28:11.194005 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j7mtd\" (UniqueName: \"kubernetes.io/projected/2f9a5b46-b79d-496d-9a78-132d85f4f115-kube-api-access-j7mtd\") pod \"cert-manager-cainjector-7f985d654d-zmshh\" (UID: \"2f9a5b46-b79d-496d-9a78-132d85f4f115\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-zmshh" Dec 11 10:28:11 crc kubenswrapper[4663]: I1211 10:28:11.208659 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t2dvg\" (UniqueName: \"kubernetes.io/projected/c4c3d255-ffd5-4eab-99b7-d62d9b664a16-kube-api-access-t2dvg\") pod \"cert-manager-5b446d88c5-2fdfp\" (UID: \"c4c3d255-ffd5-4eab-99b7-d62d9b664a16\") " pod="cert-manager/cert-manager-5b446d88c5-2fdfp" Dec 11 10:28:11 crc kubenswrapper[4663]: I1211 10:28:11.208671 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"kube-api-access-q6c99\" (UniqueName: \"kubernetes.io/projected/93fa4443-e685-41c3-9801-d2a37325674a-kube-api-access-q6c99\") pod \"cert-manager-webhook-5655c58dd6-m84qd\" (UID: \"93fa4443-e685-41c3-9801-d2a37325674a\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-m84qd" Dec 11 10:28:11 crc kubenswrapper[4663]: I1211 10:28:11.209951 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j7mtd\" (UniqueName: \"kubernetes.io/projected/2f9a5b46-b79d-496d-9a78-132d85f4f115-kube-api-access-j7mtd\") pod \"cert-manager-cainjector-7f985d654d-zmshh\" (UID: \"2f9a5b46-b79d-496d-9a78-132d85f4f115\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-zmshh" Dec 11 10:28:11 crc kubenswrapper[4663]: I1211 10:28:11.326237 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-2fdfp" Dec 11 10:28:11 crc kubenswrapper[4663]: I1211 10:28:11.331231 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-zmshh" Dec 11 10:28:11 crc kubenswrapper[4663]: I1211 10:28:11.339994 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-m84qd" Dec 11 10:28:11 crc kubenswrapper[4663]: I1211 10:28:11.497712 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-zmshh"] Dec 11 10:28:11 crc kubenswrapper[4663]: I1211 10:28:11.501965 4663 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 11 10:28:11 crc kubenswrapper[4663]: I1211 10:28:11.663436 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-2fdfp"] Dec 11 10:28:11 crc kubenswrapper[4663]: W1211 10:28:11.665101 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc4c3d255_ffd5_4eab_99b7_d62d9b664a16.slice/crio-ee5f499f1c5798cd60903c80d819c758c3283c0e0bdd41fca26508b29280b253 WatchSource:0}: Error finding container ee5f499f1c5798cd60903c80d819c758c3283c0e0bdd41fca26508b29280b253: Status 404 returned error can't find the container with id ee5f499f1c5798cd60903c80d819c758c3283c0e0bdd41fca26508b29280b253 Dec 11 10:28:11 crc kubenswrapper[4663]: I1211 10:28:11.731435 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-m84qd"] Dec 11 10:28:11 crc kubenswrapper[4663]: W1211 10:28:11.735063 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod93fa4443_e685_41c3_9801_d2a37325674a.slice/crio-31fb843e8e243c9a431ad3b0881224176a6e98cccd77479226d52077f8427ca1 WatchSource:0}: Error finding container 31fb843e8e243c9a431ad3b0881224176a6e98cccd77479226d52077f8427ca1: Status 404 returned error can't find the container with id 31fb843e8e243c9a431ad3b0881224176a6e98cccd77479226d52077f8427ca1 Dec 11 10:28:12 crc kubenswrapper[4663]: I1211 10:28:12.078578 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-2fdfp" event={"ID":"c4c3d255-ffd5-4eab-99b7-d62d9b664a16","Type":"ContainerStarted","Data":"ee5f499f1c5798cd60903c80d819c758c3283c0e0bdd41fca26508b29280b253"} Dec 11 10:28:12 crc kubenswrapper[4663]: I1211 10:28:12.079460 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-m84qd" 
event={"ID":"93fa4443-e685-41c3-9801-d2a37325674a","Type":"ContainerStarted","Data":"31fb843e8e243c9a431ad3b0881224176a6e98cccd77479226d52077f8427ca1"} Dec 11 10:28:12 crc kubenswrapper[4663]: I1211 10:28:12.080227 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-zmshh" event={"ID":"2f9a5b46-b79d-496d-9a78-132d85f4f115","Type":"ContainerStarted","Data":"383bdc80ebd66da147d64e88784bfa631bb616b1e88c901b33765e5544801bd3"} Dec 11 10:28:14 crc kubenswrapper[4663]: I1211 10:28:14.090813 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-zmshh" event={"ID":"2f9a5b46-b79d-496d-9a78-132d85f4f115","Type":"ContainerStarted","Data":"3e1fe33584bcddf700720b3ce9eff47d8e72880835ff25d809c736fe49050fc2"} Dec 11 10:28:14 crc kubenswrapper[4663]: I1211 10:28:14.106912 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-zmshh" podStartSLOduration=2.47119487 podStartE2EDuration="4.106899164s" podCreationTimestamp="2025-12-11 10:28:10 +0000 UTC" firstStartedPulling="2025-12-11 10:28:11.501782191 +0000 UTC m=+622.319711975" lastFinishedPulling="2025-12-11 10:28:13.137486485 +0000 UTC m=+623.955416269" observedRunningTime="2025-12-11 10:28:14.103099583 +0000 UTC m=+624.921029368" watchObservedRunningTime="2025-12-11 10:28:14.106899164 +0000 UTC m=+624.924828947" Dec 11 10:28:15 crc kubenswrapper[4663]: I1211 10:28:15.098407 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-2fdfp" event={"ID":"c4c3d255-ffd5-4eab-99b7-d62d9b664a16","Type":"ContainerStarted","Data":"723c665ce2fba05e969cecff1a40949ddd5e3b7629ea739f9e3ec7b9102a49e4"} Dec 11 10:28:15 crc kubenswrapper[4663]: I1211 10:28:15.100444 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-m84qd" event={"ID":"93fa4443-e685-41c3-9801-d2a37325674a","Type":"ContainerStarted","Data":"80f3ad050333a4f544c11b47d4f615e856bb622754d4d606fa2c0581f831f10a"} Dec 11 10:28:15 crc kubenswrapper[4663]: I1211 10:28:15.112086 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-2fdfp" podStartSLOduration=2.411456002 podStartE2EDuration="5.112075136s" podCreationTimestamp="2025-12-11 10:28:10 +0000 UTC" firstStartedPulling="2025-12-11 10:28:11.666895269 +0000 UTC m=+622.484825053" lastFinishedPulling="2025-12-11 10:28:14.367514393 +0000 UTC m=+625.185444187" observedRunningTime="2025-12-11 10:28:15.110126609 +0000 UTC m=+625.928056393" watchObservedRunningTime="2025-12-11 10:28:15.112075136 +0000 UTC m=+625.930004920" Dec 11 10:28:15 crc kubenswrapper[4663]: I1211 10:28:15.121951 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-m84qd" podStartSLOduration=2.514408112 podStartE2EDuration="5.121937303s" podCreationTimestamp="2025-12-11 10:28:10 +0000 UTC" firstStartedPulling="2025-12-11 10:28:11.736896236 +0000 UTC m=+622.554826020" lastFinishedPulling="2025-12-11 10:28:14.344425428 +0000 UTC m=+625.162355211" observedRunningTime="2025-12-11 10:28:15.120914017 +0000 UTC m=+625.938843801" watchObservedRunningTime="2025-12-11 10:28:15.121937303 +0000 UTC m=+625.939867087" Dec 11 10:28:16 crc kubenswrapper[4663]: I1211 10:28:16.106700 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-m84qd" Dec 11 
10:28:21 crc kubenswrapper[4663]: I1211 10:28:21.342770 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-m84qd" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.442133 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-z7882"] Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.442474 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" podUID="4f739f0f-4546-4db5-809d-435823288bd8" containerName="ovn-controller" containerID="cri-o://fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024" gracePeriod=30 Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.442569 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" podUID="4f739f0f-4546-4db5-809d-435823288bd8" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e" gracePeriod=30 Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.442597 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" podUID="4f739f0f-4546-4db5-809d-435823288bd8" containerName="northd" containerID="cri-o://84fd59d33b64d1211bfe2754377534ed00ca078356eeb674fcd5d2d25761500b" gracePeriod=30 Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.442623 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" podUID="4f739f0f-4546-4db5-809d-435823288bd8" containerName="ovn-acl-logging" containerID="cri-o://430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729" gracePeriod=30 Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.442616 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" podUID="4f739f0f-4546-4db5-809d-435823288bd8" containerName="kube-rbac-proxy-node" containerID="cri-o://1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522" gracePeriod=30 Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.442603 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" podUID="4f739f0f-4546-4db5-809d-435823288bd8" containerName="sbdb" containerID="cri-o://352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e" gracePeriod=30 Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.442556 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" podUID="4f739f0f-4546-4db5-809d-435823288bd8" containerName="nbdb" containerID="cri-o://f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15" gracePeriod=30 Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.467694 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" podUID="4f739f0f-4546-4db5-809d-435823288bd8" containerName="ovnkube-controller" containerID="cri-o://e5e3fa7340c49bbb65191e085175c9957df030d106c3870d12c90958223a80d6" gracePeriod=30 Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.692790 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-z7882_4f739f0f-4546-4db5-809d-435823288bd8/ovnkube-controller/3.log" Dec 11 10:28:22 crc 
Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.694387 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-z7882_4f739f0f-4546-4db5-809d-435823288bd8/ovn-acl-logging/0.log"
Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.694758 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-z7882_4f739f0f-4546-4db5-809d-435823288bd8/ovn-controller/0.log"
Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.695105 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-z7882"
Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.724439 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-host-run-ovn-kubernetes\") pod \"4f739f0f-4546-4db5-809d-435823288bd8\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") "
Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.724606 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-node-log\") pod \"4f739f0f-4546-4db5-809d-435823288bd8\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") "
Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.724496 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "4f739f0f-4546-4db5-809d-435823288bd8" (UID: "4f739f0f-4546-4db5-809d-435823288bd8"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.724687 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-node-log" (OuterVolumeSpecName: "node-log") pod "4f739f0f-4546-4db5-809d-435823288bd8" (UID: "4f739f0f-4546-4db5-809d-435823288bd8"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.724751 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "4f739f0f-4546-4db5-809d-435823288bd8" (UID: "4f739f0f-4546-4db5-809d-435823288bd8"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.724689 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-host-var-lib-cni-networks-ovn-kubernetes\") pod \"4f739f0f-4546-4db5-809d-435823288bd8\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") "
Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.724911 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-host-kubelet\") pod \"4f739f0f-4546-4db5-809d-435823288bd8\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") "
Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.724991 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-systemd-units\") pod \"4f739f0f-4546-4db5-809d-435823288bd8\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") "
Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.724947 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "4f739f0f-4546-4db5-809d-435823288bd8" (UID: "4f739f0f-4546-4db5-809d-435823288bd8"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.725104 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lxx7n\" (UniqueName: \"kubernetes.io/projected/4f739f0f-4546-4db5-809d-435823288bd8-kube-api-access-lxx7n\") pod \"4f739f0f-4546-4db5-809d-435823288bd8\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") "
Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.725221 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-run-openvswitch\") pod \"4f739f0f-4546-4db5-809d-435823288bd8\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") "
Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.725130 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "4f739f0f-4546-4db5-809d-435823288bd8" (UID: "4f739f0f-4546-4db5-809d-435823288bd8"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue ""
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.725288 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-run-ovn\") pod \"4f739f0f-4546-4db5-809d-435823288bd8\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.725344 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/4f739f0f-4546-4db5-809d-435823288bd8-ovnkube-script-lib\") pod \"4f739f0f-4546-4db5-809d-435823288bd8\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.725370 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/4f739f0f-4546-4db5-809d-435823288bd8-ovnkube-config\") pod \"4f739f0f-4546-4db5-809d-435823288bd8\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.725391 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-host-slash\") pod \"4f739f0f-4546-4db5-809d-435823288bd8\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.725422 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-log-socket\") pod \"4f739f0f-4546-4db5-809d-435823288bd8\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.725438 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-etc-openvswitch\") pod \"4f739f0f-4546-4db5-809d-435823288bd8\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.725473 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-host-cni-netd\") pod \"4f739f0f-4546-4db5-809d-435823288bd8\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.725489 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-var-lib-openvswitch\") pod \"4f739f0f-4546-4db5-809d-435823288bd8\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.725507 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-host-cni-bin\") pod \"4f739f0f-4546-4db5-809d-435823288bd8\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.725524 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/4f739f0f-4546-4db5-809d-435823288bd8-env-overrides\") pod \"4f739f0f-4546-4db5-809d-435823288bd8\" (UID: 
\"4f739f0f-4546-4db5-809d-435823288bd8\") " Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.725563 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-host-run-netns\") pod \"4f739f0f-4546-4db5-809d-435823288bd8\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.725579 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-run-systemd\") pod \"4f739f0f-4546-4db5-809d-435823288bd8\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.725606 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/4f739f0f-4546-4db5-809d-435823288bd8-ovn-node-metrics-cert\") pod \"4f739f0f-4546-4db5-809d-435823288bd8\" (UID: \"4f739f0f-4546-4db5-809d-435823288bd8\") " Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.725712 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "4f739f0f-4546-4db5-809d-435823288bd8" (UID: "4f739f0f-4546-4db5-809d-435823288bd8"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.725795 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "4f739f0f-4546-4db5-809d-435823288bd8" (UID: "4f739f0f-4546-4db5-809d-435823288bd8"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.725831 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-host-slash" (OuterVolumeSpecName: "host-slash") pod "4f739f0f-4546-4db5-809d-435823288bd8" (UID: "4f739f0f-4546-4db5-809d-435823288bd8"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.725893 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "4f739f0f-4546-4db5-809d-435823288bd8" (UID: "4f739f0f-4546-4db5-809d-435823288bd8"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.725863 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-log-socket" (OuterVolumeSpecName: "log-socket") pod "4f739f0f-4546-4db5-809d-435823288bd8" (UID: "4f739f0f-4546-4db5-809d-435823288bd8"). InnerVolumeSpecName "log-socket". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.725932 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "4f739f0f-4546-4db5-809d-435823288bd8" (UID: "4f739f0f-4546-4db5-809d-435823288bd8"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.726027 4663 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.726047 4663 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-host-cni-netd\") on node \"crc\" DevicePath \"\"" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.726058 4663 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.726071 4663 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.726080 4663 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-node-log\") on node \"crc\" DevicePath \"\"" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.726088 4663 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-host-kubelet\") on node \"crc\" DevicePath \"\"" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.726097 4663 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-systemd-units\") on node \"crc\" DevicePath \"\"" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.726104 4663 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-run-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.726113 4663 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.726120 4663 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-host-slash\") on node \"crc\" DevicePath \"\"" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.726207 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4f739f0f-4546-4db5-809d-435823288bd8-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "4f739f0f-4546-4db5-809d-435823288bd8" (UID: 
"4f739f0f-4546-4db5-809d-435823288bd8"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.725964 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "4f739f0f-4546-4db5-809d-435823288bd8" (UID: "4f739f0f-4546-4db5-809d-435823288bd8"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.725970 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "4f739f0f-4546-4db5-809d-435823288bd8" (UID: "4f739f0f-4546-4db5-809d-435823288bd8"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.726372 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4f739f0f-4546-4db5-809d-435823288bd8-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "4f739f0f-4546-4db5-809d-435823288bd8" (UID: "4f739f0f-4546-4db5-809d-435823288bd8"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.726393 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4f739f0f-4546-4db5-809d-435823288bd8-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "4f739f0f-4546-4db5-809d-435823288bd8" (UID: "4f739f0f-4546-4db5-809d-435823288bd8"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.730429 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f739f0f-4546-4db5-809d-435823288bd8-kube-api-access-lxx7n" (OuterVolumeSpecName: "kube-api-access-lxx7n") pod "4f739f0f-4546-4db5-809d-435823288bd8" (UID: "4f739f0f-4546-4db5-809d-435823288bd8"). InnerVolumeSpecName "kube-api-access-lxx7n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.730492 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f739f0f-4546-4db5-809d-435823288bd8-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "4f739f0f-4546-4db5-809d-435823288bd8" (UID: "4f739f0f-4546-4db5-809d-435823288bd8"). InnerVolumeSpecName "ovn-node-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.730902 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-s72j9"] Dec 11 10:28:22 crc kubenswrapper[4663]: E1211 10:28:22.731073 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f739f0f-4546-4db5-809d-435823288bd8" containerName="sbdb" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.731101 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f739f0f-4546-4db5-809d-435823288bd8" containerName="sbdb" Dec 11 10:28:22 crc kubenswrapper[4663]: E1211 10:28:22.731111 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f739f0f-4546-4db5-809d-435823288bd8" containerName="nbdb" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.731117 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f739f0f-4546-4db5-809d-435823288bd8" containerName="nbdb" Dec 11 10:28:22 crc kubenswrapper[4663]: E1211 10:28:22.731125 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f739f0f-4546-4db5-809d-435823288bd8" containerName="northd" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.731131 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f739f0f-4546-4db5-809d-435823288bd8" containerName="northd" Dec 11 10:28:22 crc kubenswrapper[4663]: E1211 10:28:22.731140 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f739f0f-4546-4db5-809d-435823288bd8" containerName="ovnkube-controller" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.731146 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f739f0f-4546-4db5-809d-435823288bd8" containerName="ovnkube-controller" Dec 11 10:28:22 crc kubenswrapper[4663]: E1211 10:28:22.731153 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f739f0f-4546-4db5-809d-435823288bd8" containerName="kubecfg-setup" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.731158 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f739f0f-4546-4db5-809d-435823288bd8" containerName="kubecfg-setup" Dec 11 10:28:22 crc kubenswrapper[4663]: E1211 10:28:22.731167 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f739f0f-4546-4db5-809d-435823288bd8" containerName="ovnkube-controller" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.731172 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f739f0f-4546-4db5-809d-435823288bd8" containerName="ovnkube-controller" Dec 11 10:28:22 crc kubenswrapper[4663]: E1211 10:28:22.731200 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f739f0f-4546-4db5-809d-435823288bd8" containerName="ovnkube-controller" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.731206 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f739f0f-4546-4db5-809d-435823288bd8" containerName="ovnkube-controller" Dec 11 10:28:22 crc kubenswrapper[4663]: E1211 10:28:22.731213 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f739f0f-4546-4db5-809d-435823288bd8" containerName="ovnkube-controller" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.731219 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f739f0f-4546-4db5-809d-435823288bd8" containerName="ovnkube-controller" Dec 11 10:28:22 crc kubenswrapper[4663]: E1211 10:28:22.731226 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f739f0f-4546-4db5-809d-435823288bd8" 
containerName="kube-rbac-proxy-node" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.731232 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f739f0f-4546-4db5-809d-435823288bd8" containerName="kube-rbac-proxy-node" Dec 11 10:28:22 crc kubenswrapper[4663]: E1211 10:28:22.731240 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f739f0f-4546-4db5-809d-435823288bd8" containerName="ovn-controller" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.731393 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f739f0f-4546-4db5-809d-435823288bd8" containerName="ovn-controller" Dec 11 10:28:22 crc kubenswrapper[4663]: E1211 10:28:22.731407 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f739f0f-4546-4db5-809d-435823288bd8" containerName="ovn-acl-logging" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.731413 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f739f0f-4546-4db5-809d-435823288bd8" containerName="ovn-acl-logging" Dec 11 10:28:22 crc kubenswrapper[4663]: E1211 10:28:22.731423 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f739f0f-4546-4db5-809d-435823288bd8" containerName="kube-rbac-proxy-ovn-metrics" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.731428 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f739f0f-4546-4db5-809d-435823288bd8" containerName="kube-rbac-proxy-ovn-metrics" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.731541 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f739f0f-4546-4db5-809d-435823288bd8" containerName="nbdb" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.731550 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f739f0f-4546-4db5-809d-435823288bd8" containerName="ovnkube-controller" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.731556 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f739f0f-4546-4db5-809d-435823288bd8" containerName="kube-rbac-proxy-ovn-metrics" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.731563 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f739f0f-4546-4db5-809d-435823288bd8" containerName="ovnkube-controller" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.731595 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f739f0f-4546-4db5-809d-435823288bd8" containerName="ovnkube-controller" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.731614 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f739f0f-4546-4db5-809d-435823288bd8" containerName="ovn-controller" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.731622 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f739f0f-4546-4db5-809d-435823288bd8" containerName="northd" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.731632 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f739f0f-4546-4db5-809d-435823288bd8" containerName="kube-rbac-proxy-node" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.731639 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f739f0f-4546-4db5-809d-435823288bd8" containerName="sbdb" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.731647 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f739f0f-4546-4db5-809d-435823288bd8" containerName="ovn-acl-logging" Dec 11 10:28:22 crc kubenswrapper[4663]: E1211 10:28:22.731751 4663 
Dec 11 10:28:22 crc kubenswrapper[4663]: E1211 10:28:22.731751 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f739f0f-4546-4db5-809d-435823288bd8" containerName="ovnkube-controller"
Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.731758 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f739f0f-4546-4db5-809d-435823288bd8" containerName="ovnkube-controller"
Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.731849 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f739f0f-4546-4db5-809d-435823288bd8" containerName="ovnkube-controller"
Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.731863 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f739f0f-4546-4db5-809d-435823288bd8" containerName="ovnkube-controller"
Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.733139 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-s72j9"
Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.736034 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "4f739f0f-4546-4db5-809d-435823288bd8" (UID: "4f739f0f-4546-4db5-809d-435823288bd8"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.827650 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f7d5d1e6-1d08-4440-b532-35503b93f22e-host-cni-netd\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9"
Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.827868 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f7d5d1e6-1d08-4440-b532-35503b93f22e-etc-openvswitch\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9"
Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.827960 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f7d5d1e6-1d08-4440-b532-35503b93f22e-host-cni-bin\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9"
Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.828036 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k2lmw\" (UniqueName: \"kubernetes.io/projected/f7d5d1e6-1d08-4440-b532-35503b93f22e-kube-api-access-k2lmw\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9"
Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.828119 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f7d5d1e6-1d08-4440-b532-35503b93f22e-ovnkube-config\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9"
Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.828205 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f7d5d1e6-1d08-4440-b532-35503b93f22e-run-systemd\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9"
Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.828276 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f7d5d1e6-1d08-4440-b532-35503b93f22e-host-slash\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9"
Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.828352 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f7d5d1e6-1d08-4440-b532-35503b93f22e-run-ovn\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9"
Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.828428 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f7d5d1e6-1d08-4440-b532-35503b93f22e-run-openvswitch\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9"
Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.828498 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f7d5d1e6-1d08-4440-b532-35503b93f22e-var-lib-openvswitch\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9"
Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.828559 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f7d5d1e6-1d08-4440-b532-35503b93f22e-ovnkube-script-lib\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9"
Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.828636 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f7d5d1e6-1d08-4440-b532-35503b93f22e-host-run-netns\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9"
Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.828703 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f7d5d1e6-1d08-4440-b532-35503b93f22e-systemd-units\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9"
Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.828769 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f7d5d1e6-1d08-4440-b532-35503b93f22e-host-run-ovn-kubernetes\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9"
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f7d5d1e6-1d08-4440-b532-35503b93f22e-host-kubelet\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.828939 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f7d5d1e6-1d08-4440-b532-35503b93f22e-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.829010 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f7d5d1e6-1d08-4440-b532-35503b93f22e-ovn-node-metrics-cert\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.829068 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f7d5d1e6-1d08-4440-b532-35503b93f22e-node-log\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.829129 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f7d5d1e6-1d08-4440-b532-35503b93f22e-env-overrides\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.829211 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f7d5d1e6-1d08-4440-b532-35503b93f22e-log-socket\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.829334 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lxx7n\" (UniqueName: \"kubernetes.io/projected/4f739f0f-4546-4db5-809d-435823288bd8-kube-api-access-lxx7n\") on node \"crc\" DevicePath \"\"" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.829403 4663 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/4f739f0f-4546-4db5-809d-435823288bd8-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.829450 4663 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/4f739f0f-4546-4db5-809d-435823288bd8-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.829499 4663 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-log-socket\") on node \"crc\" DevicePath \"\"" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.829542 4663 
reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.829585 4663 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/4f739f0f-4546-4db5-809d-435823288bd8-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.829630 4663 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-host-cni-bin\") on node \"crc\" DevicePath \"\"" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.829677 4663 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-host-run-netns\") on node \"crc\" DevicePath \"\"" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.829724 4663 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/4f739f0f-4546-4db5-809d-435823288bd8-run-systemd\") on node \"crc\" DevicePath \"\"" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.829771 4663 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/4f739f0f-4546-4db5-809d-435823288bd8-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.930609 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f7d5d1e6-1d08-4440-b532-35503b93f22e-etc-openvswitch\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.930768 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f7d5d1e6-1d08-4440-b532-35503b93f22e-host-cni-bin\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.930865 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f7d5d1e6-1d08-4440-b532-35503b93f22e-host-cni-bin\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.930938 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k2lmw\" (UniqueName: \"kubernetes.io/projected/f7d5d1e6-1d08-4440-b532-35503b93f22e-kube-api-access-k2lmw\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.931031 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f7d5d1e6-1d08-4440-b532-35503b93f22e-ovnkube-config\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.930675 4663 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f7d5d1e6-1d08-4440-b532-35503b93f22e-etc-openvswitch\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.931141 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f7d5d1e6-1d08-4440-b532-35503b93f22e-run-systemd\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.931209 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f7d5d1e6-1d08-4440-b532-35503b93f22e-run-systemd\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.931237 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f7d5d1e6-1d08-4440-b532-35503b93f22e-host-slash\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.931281 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f7d5d1e6-1d08-4440-b532-35503b93f22e-run-ovn\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.931308 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f7d5d1e6-1d08-4440-b532-35503b93f22e-run-openvswitch\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.931341 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f7d5d1e6-1d08-4440-b532-35503b93f22e-var-lib-openvswitch\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.931362 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f7d5d1e6-1d08-4440-b532-35503b93f22e-ovnkube-script-lib\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.931391 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f7d5d1e6-1d08-4440-b532-35503b93f22e-host-run-netns\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.931397 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/f7d5d1e6-1d08-4440-b532-35503b93f22e-run-openvswitch\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.931415 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f7d5d1e6-1d08-4440-b532-35503b93f22e-systemd-units\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.931361 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f7d5d1e6-1d08-4440-b532-35503b93f22e-run-ovn\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.931435 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f7d5d1e6-1d08-4440-b532-35503b93f22e-host-run-ovn-kubernetes\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.931452 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f7d5d1e6-1d08-4440-b532-35503b93f22e-host-run-netns\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.931458 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f7d5d1e6-1d08-4440-b532-35503b93f22e-host-kubelet\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.931467 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f7d5d1e6-1d08-4440-b532-35503b93f22e-var-lib-openvswitch\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.931490 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f7d5d1e6-1d08-4440-b532-35503b93f22e-host-run-ovn-kubernetes\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.931474 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f7d5d1e6-1d08-4440-b532-35503b93f22e-systemd-units\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.931502 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f7d5d1e6-1d08-4440-b532-35503b93f22e-host-kubelet\") pod \"ovnkube-node-s72j9\" (UID: 
\"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.931525 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f7d5d1e6-1d08-4440-b532-35503b93f22e-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.931556 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f7d5d1e6-1d08-4440-b532-35503b93f22e-env-overrides\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.931571 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f7d5d1e6-1d08-4440-b532-35503b93f22e-ovn-node-metrics-cert\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.931587 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f7d5d1e6-1d08-4440-b532-35503b93f22e-node-log\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.931609 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f7d5d1e6-1d08-4440-b532-35503b93f22e-log-socket\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.931627 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f7d5d1e6-1d08-4440-b532-35503b93f22e-host-cni-netd\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.931705 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f7d5d1e6-1d08-4440-b532-35503b93f22e-host-cni-netd\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.931726 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f7d5d1e6-1d08-4440-b532-35503b93f22e-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.931726 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f7d5d1e6-1d08-4440-b532-35503b93f22e-ovnkube-config\") pod \"ovnkube-node-s72j9\" (UID: 
\"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.931754 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f7d5d1e6-1d08-4440-b532-35503b93f22e-node-log\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.931773 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f7d5d1e6-1d08-4440-b532-35503b93f22e-log-socket\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.931922 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f7d5d1e6-1d08-4440-b532-35503b93f22e-host-slash\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.931978 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f7d5d1e6-1d08-4440-b532-35503b93f22e-ovnkube-script-lib\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.932115 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f7d5d1e6-1d08-4440-b532-35503b93f22e-env-overrides\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.934329 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f7d5d1e6-1d08-4440-b532-35503b93f22e-ovn-node-metrics-cert\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" Dec 11 10:28:22 crc kubenswrapper[4663]: I1211 10:28:22.943877 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k2lmw\" (UniqueName: \"kubernetes.io/projected/f7d5d1e6-1d08-4440-b532-35503b93f22e-kube-api-access-k2lmw\") pod \"ovnkube-node-s72j9\" (UID: \"f7d5d1e6-1d08-4440-b532-35503b93f22e\") " pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.051938 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" Dec 11 10:28:23 crc kubenswrapper[4663]: W1211 10:28:23.067430 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf7d5d1e6_1d08_4440_b532_35503b93f22e.slice/crio-5a04f13b75601bfb4c353c1e80b821f4ea7940a0af47f0386b6f2c84e08c5445 WatchSource:0}: Error finding container 5a04f13b75601bfb4c353c1e80b821f4ea7940a0af47f0386b6f2c84e08c5445: Status 404 returned error can't find the container with id 5a04f13b75601bfb4c353c1e80b821f4ea7940a0af47f0386b6f2c84e08c5445 Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.135250 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-z7882_4f739f0f-4546-4db5-809d-435823288bd8/ovnkube-controller/3.log" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.137219 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-z7882_4f739f0f-4546-4db5-809d-435823288bd8/ovn-acl-logging/0.log" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.137625 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-z7882_4f739f0f-4546-4db5-809d-435823288bd8/ovn-controller/0.log" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.137998 4663 generic.go:334] "Generic (PLEG): container finished" podID="4f739f0f-4546-4db5-809d-435823288bd8" containerID="e5e3fa7340c49bbb65191e085175c9957df030d106c3870d12c90958223a80d6" exitCode=0 Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138020 4663 generic.go:334] "Generic (PLEG): container finished" podID="4f739f0f-4546-4db5-809d-435823288bd8" containerID="352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e" exitCode=0 Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138029 4663 generic.go:334] "Generic (PLEG): container finished" podID="4f739f0f-4546-4db5-809d-435823288bd8" containerID="f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15" exitCode=0 Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138025 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" event={"ID":"4f739f0f-4546-4db5-809d-435823288bd8","Type":"ContainerDied","Data":"e5e3fa7340c49bbb65191e085175c9957df030d106c3870d12c90958223a80d6"} Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138064 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" event={"ID":"4f739f0f-4546-4db5-809d-435823288bd8","Type":"ContainerDied","Data":"352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e"} Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138077 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" event={"ID":"4f739f0f-4546-4db5-809d-435823288bd8","Type":"ContainerDied","Data":"f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15"} Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138087 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" event={"ID":"4f739f0f-4546-4db5-809d-435823288bd8","Type":"ContainerDied","Data":"84fd59d33b64d1211bfe2754377534ed00ca078356eeb674fcd5d2d25761500b"} Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138036 4663 generic.go:334] "Generic (PLEG): container finished" podID="4f739f0f-4546-4db5-809d-435823288bd8" 
containerID="84fd59d33b64d1211bfe2754377534ed00ca078356eeb674fcd5d2d25761500b" exitCode=0 Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138123 4663 scope.go:117] "RemoveContainer" containerID="e5e3fa7340c49bbb65191e085175c9957df030d106c3870d12c90958223a80d6" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138131 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" event={"ID":"4f739f0f-4546-4db5-809d-435823288bd8","Type":"ContainerDied","Data":"ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e"} Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138124 4663 generic.go:334] "Generic (PLEG): container finished" podID="4f739f0f-4546-4db5-809d-435823288bd8" containerID="ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e" exitCode=0 Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138198 4663 generic.go:334] "Generic (PLEG): container finished" podID="4f739f0f-4546-4db5-809d-435823288bd8" containerID="1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522" exitCode=0 Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138207 4663 generic.go:334] "Generic (PLEG): container finished" podID="4f739f0f-4546-4db5-809d-435823288bd8" containerID="430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729" exitCode=143 Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138213 4663 generic.go:334] "Generic (PLEG): container finished" podID="4f739f0f-4546-4db5-809d-435823288bd8" containerID="fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024" exitCode=143 Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138246 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" event={"ID":"4f739f0f-4546-4db5-809d-435823288bd8","Type":"ContainerDied","Data":"1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522"} Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138270 4663 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138268 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0963c6c1df39ea1ae72e428e60b76ab3c5677d293f2fc2dd2f19bf3807a0dd6e"} Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138363 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e"} Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138370 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15"} Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138376 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"84fd59d33b64d1211bfe2754377534ed00ca078356eeb674fcd5d2d25761500b"} Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138381 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e"} Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138385 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522"} Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138391 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729"} Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138396 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024"} Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138401 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022"} Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138409 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" event={"ID":"4f739f0f-4546-4db5-809d-435823288bd8","Type":"ContainerDied","Data":"430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729"} Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138417 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e5e3fa7340c49bbb65191e085175c9957df030d106c3870d12c90958223a80d6"} Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138424 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0963c6c1df39ea1ae72e428e60b76ab3c5677d293f2fc2dd2f19bf3807a0dd6e"} Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138429 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e"} Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138434 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15"} Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138438 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"84fd59d33b64d1211bfe2754377534ed00ca078356eeb674fcd5d2d25761500b"} Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138443 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e"} Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138447 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522"} Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138452 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729"} Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138457 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024"} Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138461 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022"} Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138469 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" event={"ID":"4f739f0f-4546-4db5-809d-435823288bd8","Type":"ContainerDied","Data":"fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024"} Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138476 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e5e3fa7340c49bbb65191e085175c9957df030d106c3870d12c90958223a80d6"} Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138482 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0963c6c1df39ea1ae72e428e60b76ab3c5677d293f2fc2dd2f19bf3807a0dd6e"} Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138487 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e"} Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138491 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15"} Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138496 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"84fd59d33b64d1211bfe2754377534ed00ca078356eeb674fcd5d2d25761500b"} Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138500 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e"} Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138505 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522"} Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138509 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729"} Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138513 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024"} Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138518 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022"} Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138524 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z7882" event={"ID":"4f739f0f-4546-4db5-809d-435823288bd8","Type":"ContainerDied","Data":"b536d6d362cb3ab7fc77a155b44aba445d87975bf29d5587132739459136a578"} Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138530 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e5e3fa7340c49bbb65191e085175c9957df030d106c3870d12c90958223a80d6"} Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138535 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0963c6c1df39ea1ae72e428e60b76ab3c5677d293f2fc2dd2f19bf3807a0dd6e"} Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138540 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e"} Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138544 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15"} Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138549 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"84fd59d33b64d1211bfe2754377534ed00ca078356eeb674fcd5d2d25761500b"} Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138554 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e"} Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138558 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522"} Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138563 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729"} Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138567 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024"} Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.138571 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022"} Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.140097 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-qv7jl_43cbb02f-aa8f-41aa-bd0a-8042aca3002c/kube-multus/2.log" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.140542 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-qv7jl_43cbb02f-aa8f-41aa-bd0a-8042aca3002c/kube-multus/1.log" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.140588 4663 generic.go:334] "Generic (PLEG): container finished" podID="43cbb02f-aa8f-41aa-bd0a-8042aca3002c" containerID="bbef5c59739cbf708221920dc1d3eba9725c081535cabff905d9eddf68fda315" exitCode=2 Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.140653 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-qv7jl" event={"ID":"43cbb02f-aa8f-41aa-bd0a-8042aca3002c","Type":"ContainerDied","Data":"bbef5c59739cbf708221920dc1d3eba9725c081535cabff905d9eddf68fda315"} Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.140676 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"60bc0fe12622361f71172b9c28087be13fe7c722952d22af2982c439d19c7a73"} Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.140948 4663 scope.go:117] "RemoveContainer" containerID="bbef5c59739cbf708221920dc1d3eba9725c081535cabff905d9eddf68fda315" Dec 11 10:28:23 crc kubenswrapper[4663]: E1211 10:28:23.141123 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-qv7jl_openshift-multus(43cbb02f-aa8f-41aa-bd0a-8042aca3002c)\"" pod="openshift-multus/multus-qv7jl" podUID="43cbb02f-aa8f-41aa-bd0a-8042aca3002c" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.150089 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" event={"ID":"f7d5d1e6-1d08-4440-b532-35503b93f22e","Type":"ContainerStarted","Data":"1287a6f1d241c1e1be34bfb98097696676b91d02a0e884cd146a56e4f81020a8"} Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.150122 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" event={"ID":"f7d5d1e6-1d08-4440-b532-35503b93f22e","Type":"ContainerStarted","Data":"5a04f13b75601bfb4c353c1e80b821f4ea7940a0af47f0386b6f2c84e08c5445"} Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.185940 4663 scope.go:117] "RemoveContainer" containerID="0963c6c1df39ea1ae72e428e60b76ab3c5677d293f2fc2dd2f19bf3807a0dd6e" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.204944 4663 scope.go:117] "RemoveContainer" containerID="352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.207853 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-z7882"] Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.210994 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-z7882"] Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.222718 4663 scope.go:117] "RemoveContainer" containerID="f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.232134 4663 scope.go:117] "RemoveContainer" 
containerID="84fd59d33b64d1211bfe2754377534ed00ca078356eeb674fcd5d2d25761500b" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.245504 4663 scope.go:117] "RemoveContainer" containerID="ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.260795 4663 scope.go:117] "RemoveContainer" containerID="1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.269985 4663 scope.go:117] "RemoveContainer" containerID="430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.289323 4663 scope.go:117] "RemoveContainer" containerID="fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.301140 4663 scope.go:117] "RemoveContainer" containerID="848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.311512 4663 scope.go:117] "RemoveContainer" containerID="e5e3fa7340c49bbb65191e085175c9957df030d106c3870d12c90958223a80d6" Dec 11 10:28:23 crc kubenswrapper[4663]: E1211 10:28:23.311894 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e5e3fa7340c49bbb65191e085175c9957df030d106c3870d12c90958223a80d6\": container with ID starting with e5e3fa7340c49bbb65191e085175c9957df030d106c3870d12c90958223a80d6 not found: ID does not exist" containerID="e5e3fa7340c49bbb65191e085175c9957df030d106c3870d12c90958223a80d6" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.311924 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e5e3fa7340c49bbb65191e085175c9957df030d106c3870d12c90958223a80d6"} err="failed to get container status \"e5e3fa7340c49bbb65191e085175c9957df030d106c3870d12c90958223a80d6\": rpc error: code = NotFound desc = could not find container \"e5e3fa7340c49bbb65191e085175c9957df030d106c3870d12c90958223a80d6\": container with ID starting with e5e3fa7340c49bbb65191e085175c9957df030d106c3870d12c90958223a80d6 not found: ID does not exist" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.311941 4663 scope.go:117] "RemoveContainer" containerID="0963c6c1df39ea1ae72e428e60b76ab3c5677d293f2fc2dd2f19bf3807a0dd6e" Dec 11 10:28:23 crc kubenswrapper[4663]: E1211 10:28:23.312255 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0963c6c1df39ea1ae72e428e60b76ab3c5677d293f2fc2dd2f19bf3807a0dd6e\": container with ID starting with 0963c6c1df39ea1ae72e428e60b76ab3c5677d293f2fc2dd2f19bf3807a0dd6e not found: ID does not exist" containerID="0963c6c1df39ea1ae72e428e60b76ab3c5677d293f2fc2dd2f19bf3807a0dd6e" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.312301 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0963c6c1df39ea1ae72e428e60b76ab3c5677d293f2fc2dd2f19bf3807a0dd6e"} err="failed to get container status \"0963c6c1df39ea1ae72e428e60b76ab3c5677d293f2fc2dd2f19bf3807a0dd6e\": rpc error: code = NotFound desc = could not find container \"0963c6c1df39ea1ae72e428e60b76ab3c5677d293f2fc2dd2f19bf3807a0dd6e\": container with ID starting with 0963c6c1df39ea1ae72e428e60b76ab3c5677d293f2fc2dd2f19bf3807a0dd6e not found: ID does not exist" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.312317 4663 scope.go:117] "RemoveContainer" 
containerID="352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e" Dec 11 10:28:23 crc kubenswrapper[4663]: E1211 10:28:23.312676 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e\": container with ID starting with 352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e not found: ID does not exist" containerID="352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.312699 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e"} err="failed to get container status \"352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e\": rpc error: code = NotFound desc = could not find container \"352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e\": container with ID starting with 352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e not found: ID does not exist" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.312712 4663 scope.go:117] "RemoveContainer" containerID="f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15" Dec 11 10:28:23 crc kubenswrapper[4663]: E1211 10:28:23.313099 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15\": container with ID starting with f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15 not found: ID does not exist" containerID="f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.313134 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15"} err="failed to get container status \"f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15\": rpc error: code = NotFound desc = could not find container \"f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15\": container with ID starting with f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15 not found: ID does not exist" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.313158 4663 scope.go:117] "RemoveContainer" containerID="84fd59d33b64d1211bfe2754377534ed00ca078356eeb674fcd5d2d25761500b" Dec 11 10:28:23 crc kubenswrapper[4663]: E1211 10:28:23.314405 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"84fd59d33b64d1211bfe2754377534ed00ca078356eeb674fcd5d2d25761500b\": container with ID starting with 84fd59d33b64d1211bfe2754377534ed00ca078356eeb674fcd5d2d25761500b not found: ID does not exist" containerID="84fd59d33b64d1211bfe2754377534ed00ca078356eeb674fcd5d2d25761500b" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.314429 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"84fd59d33b64d1211bfe2754377534ed00ca078356eeb674fcd5d2d25761500b"} err="failed to get container status \"84fd59d33b64d1211bfe2754377534ed00ca078356eeb674fcd5d2d25761500b\": rpc error: code = NotFound desc = could not find container \"84fd59d33b64d1211bfe2754377534ed00ca078356eeb674fcd5d2d25761500b\": container with ID starting with 
84fd59d33b64d1211bfe2754377534ed00ca078356eeb674fcd5d2d25761500b not found: ID does not exist" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.314443 4663 scope.go:117] "RemoveContainer" containerID="ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e" Dec 11 10:28:23 crc kubenswrapper[4663]: E1211 10:28:23.314665 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e\": container with ID starting with ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e not found: ID does not exist" containerID="ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.314695 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e"} err="failed to get container status \"ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e\": rpc error: code = NotFound desc = could not find container \"ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e\": container with ID starting with ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e not found: ID does not exist" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.314707 4663 scope.go:117] "RemoveContainer" containerID="1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522" Dec 11 10:28:23 crc kubenswrapper[4663]: E1211 10:28:23.315249 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522\": container with ID starting with 1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522 not found: ID does not exist" containerID="1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.315282 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522"} err="failed to get container status \"1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522\": rpc error: code = NotFound desc = could not find container \"1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522\": container with ID starting with 1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522 not found: ID does not exist" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.315306 4663 scope.go:117] "RemoveContainer" containerID="430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729" Dec 11 10:28:23 crc kubenswrapper[4663]: E1211 10:28:23.315903 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729\": container with ID starting with 430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729 not found: ID does not exist" containerID="430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.315921 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729"} err="failed to get container status \"430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729\": rpc 
error: code = NotFound desc = could not find container \"430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729\": container with ID starting with 430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729 not found: ID does not exist"
Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.315935 4663 scope.go:117] "RemoveContainer" containerID="fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024"
Dec 11 10:28:23 crc kubenswrapper[4663]: E1211 10:28:23.317034 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024\": container with ID starting with fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024 not found: ID does not exist" containerID="fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024"
Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.317061 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024"} err="failed to get container status \"fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024\": rpc error: code = NotFound desc = could not find container \"fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024\": container with ID starting with fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024 not found: ID does not exist"
Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.317080 4663 scope.go:117] "RemoveContainer" containerID="848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022"
Dec 11 10:28:23 crc kubenswrapper[4663]: E1211 10:28:23.317490 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\": container with ID starting with 848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022 not found: ID does not exist" containerID="848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022"
Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.317514 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022"} err="failed to get container status \"848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\": rpc error: code = NotFound desc = could not find container \"848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\": container with ID starting with 848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022 not found: ID does not exist"
Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.317528 4663 scope.go:117] "RemoveContainer" containerID="e5e3fa7340c49bbb65191e085175c9957df030d106c3870d12c90958223a80d6"
Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.317753 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e5e3fa7340c49bbb65191e085175c9957df030d106c3870d12c90958223a80d6"} err="failed to get container status \"e5e3fa7340c49bbb65191e085175c9957df030d106c3870d12c90958223a80d6\": rpc error: code = NotFound desc = could not find container \"e5e3fa7340c49bbb65191e085175c9957df030d106c3870d12c90958223a80d6\": container with ID starting with e5e3fa7340c49bbb65191e085175c9957df030d106c3870d12c90958223a80d6 not found: ID does not exist"
Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.317772 4663 scope.go:117] "RemoveContainer" containerID="0963c6c1df39ea1ae72e428e60b76ab3c5677d293f2fc2dd2f19bf3807a0dd6e"
Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.317987 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0963c6c1df39ea1ae72e428e60b76ab3c5677d293f2fc2dd2f19bf3807a0dd6e"} err="failed to get container status \"0963c6c1df39ea1ae72e428e60b76ab3c5677d293f2fc2dd2f19bf3807a0dd6e\": rpc error: code = NotFound desc = could not find container \"0963c6c1df39ea1ae72e428e60b76ab3c5677d293f2fc2dd2f19bf3807a0dd6e\": container with ID starting with 0963c6c1df39ea1ae72e428e60b76ab3c5677d293f2fc2dd2f19bf3807a0dd6e not found: ID does not exist"
Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.318001 4663 scope.go:117] "RemoveContainer" containerID="352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e"
Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.318160 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e"} err="failed to get container status \"352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e\": rpc error: code = NotFound desc = could not find container \"352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e\": container with ID starting with 352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e not found: ID does not exist"
Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.318174 4663 scope.go:117] "RemoveContainer" containerID="f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15"
Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.318361 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15"} err="failed to get container status \"f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15\": rpc error: code = NotFound desc = could not find container \"f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15\": container with ID starting with f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15 not found: ID does not exist"
Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.318374 4663 scope.go:117] "RemoveContainer" containerID="84fd59d33b64d1211bfe2754377534ed00ca078356eeb674fcd5d2d25761500b"
Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.318527 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"84fd59d33b64d1211bfe2754377534ed00ca078356eeb674fcd5d2d25761500b"} err="failed to get container status \"84fd59d33b64d1211bfe2754377534ed00ca078356eeb674fcd5d2d25761500b\": rpc error: code = NotFound desc = could not find container \"84fd59d33b64d1211bfe2754377534ed00ca078356eeb674fcd5d2d25761500b\": container with ID starting with 84fd59d33b64d1211bfe2754377534ed00ca078356eeb674fcd5d2d25761500b not found: ID does not exist"
Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.318539 4663 scope.go:117] "RemoveContainer" containerID="ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e"
Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.318696 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e"} err="failed to get container status \"ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e\": rpc error: code = NotFound desc = could not find container \"ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e\": container with ID starting with ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e not found: ID does not exist"
Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.318708 4663 scope.go:117] "RemoveContainer" containerID="1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522"
Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.319391 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522"} err="failed to get container status \"1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522\": rpc error: code = NotFound desc = could not find container \"1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522\": container with ID starting with 1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522 not found: ID does not exist"
Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.319411 4663 scope.go:117] "RemoveContainer" containerID="430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729"
Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.320113 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729"} err="failed to get container status \"430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729\": rpc error: code = NotFound desc = could not find container \"430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729\": container with ID starting with 430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729 not found: ID does not exist"
Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.320132 4663 scope.go:117] "RemoveContainer" containerID="fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024"
Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.320308 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024"} err="failed to get container status \"fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024\": rpc error: code = NotFound desc = could not find container \"fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024\": container with ID starting with fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024 not found: ID does not exist"
Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.320326 4663 scope.go:117] "RemoveContainer" containerID="848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022"
Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.320510 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022"} err="failed to get container status \"848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\": rpc error: code = NotFound desc = could not find container \"848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\": container with ID starting with 848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022 not found: ID does not exist"
Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.320530 4663 scope.go:117] "RemoveContainer" containerID="e5e3fa7340c49bbb65191e085175c9957df030d106c3870d12c90958223a80d6"
Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.320729 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e5e3fa7340c49bbb65191e085175c9957df030d106c3870d12c90958223a80d6"} err="failed to get container status \"e5e3fa7340c49bbb65191e085175c9957df030d106c3870d12c90958223a80d6\": rpc error: code = NotFound desc = could not find container \"e5e3fa7340c49bbb65191e085175c9957df030d106c3870d12c90958223a80d6\": container with ID starting with e5e3fa7340c49bbb65191e085175c9957df030d106c3870d12c90958223a80d6 not found: ID does not exist"
Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.320746 4663 scope.go:117] "RemoveContainer" containerID="0963c6c1df39ea1ae72e428e60b76ab3c5677d293f2fc2dd2f19bf3807a0dd6e"
Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.320919 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0963c6c1df39ea1ae72e428e60b76ab3c5677d293f2fc2dd2f19bf3807a0dd6e"} err="failed to get container status \"0963c6c1df39ea1ae72e428e60b76ab3c5677d293f2fc2dd2f19bf3807a0dd6e\": rpc error: code = NotFound desc = could not find container \"0963c6c1df39ea1ae72e428e60b76ab3c5677d293f2fc2dd2f19bf3807a0dd6e\": container with ID starting with 0963c6c1df39ea1ae72e428e60b76ab3c5677d293f2fc2dd2f19bf3807a0dd6e not found: ID does not exist"
Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.320936 4663 scope.go:117] "RemoveContainer" containerID="352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e"
Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.321098 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e"} err="failed to get container status \"352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e\": rpc error: code = NotFound desc = could not find container \"352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e\": container with ID starting with 352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e not found: ID does not exist"
Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.321116 4663 scope.go:117] "RemoveContainer" containerID="f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15"
Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.321292 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15"} err="failed to get container status \"f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15\": rpc error: code = NotFound desc = could not find container \"f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15\": container with ID starting with f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15 not found: ID does not exist"
Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.321309 4663 scope.go:117] "RemoveContainer" containerID="84fd59d33b64d1211bfe2754377534ed00ca078356eeb674fcd5d2d25761500b"
container \"84fd59d33b64d1211bfe2754377534ed00ca078356eeb674fcd5d2d25761500b\": container with ID starting with 84fd59d33b64d1211bfe2754377534ed00ca078356eeb674fcd5d2d25761500b not found: ID does not exist" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.321775 4663 scope.go:117] "RemoveContainer" containerID="ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.321935 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e"} err="failed to get container status \"ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e\": rpc error: code = NotFound desc = could not find container \"ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e\": container with ID starting with ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e not found: ID does not exist" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.321953 4663 scope.go:117] "RemoveContainer" containerID="1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.322088 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522"} err="failed to get container status \"1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522\": rpc error: code = NotFound desc = could not find container \"1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522\": container with ID starting with 1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522 not found: ID does not exist" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.322105 4663 scope.go:117] "RemoveContainer" containerID="430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.322260 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729"} err="failed to get container status \"430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729\": rpc error: code = NotFound desc = could not find container \"430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729\": container with ID starting with 430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729 not found: ID does not exist" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.322277 4663 scope.go:117] "RemoveContainer" containerID="fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.322451 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024"} err="failed to get container status \"fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024\": rpc error: code = NotFound desc = could not find container \"fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024\": container with ID starting with fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024 not found: ID does not exist" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.322468 4663 scope.go:117] "RemoveContainer" containerID="848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.322611 4663 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022"} err="failed to get container status \"848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\": rpc error: code = NotFound desc = could not find container \"848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\": container with ID starting with 848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022 not found: ID does not exist" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.322643 4663 scope.go:117] "RemoveContainer" containerID="e5e3fa7340c49bbb65191e085175c9957df030d106c3870d12c90958223a80d6" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.322803 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e5e3fa7340c49bbb65191e085175c9957df030d106c3870d12c90958223a80d6"} err="failed to get container status \"e5e3fa7340c49bbb65191e085175c9957df030d106c3870d12c90958223a80d6\": rpc error: code = NotFound desc = could not find container \"e5e3fa7340c49bbb65191e085175c9957df030d106c3870d12c90958223a80d6\": container with ID starting with e5e3fa7340c49bbb65191e085175c9957df030d106c3870d12c90958223a80d6 not found: ID does not exist" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.322820 4663 scope.go:117] "RemoveContainer" containerID="0963c6c1df39ea1ae72e428e60b76ab3c5677d293f2fc2dd2f19bf3807a0dd6e" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.322968 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0963c6c1df39ea1ae72e428e60b76ab3c5677d293f2fc2dd2f19bf3807a0dd6e"} err="failed to get container status \"0963c6c1df39ea1ae72e428e60b76ab3c5677d293f2fc2dd2f19bf3807a0dd6e\": rpc error: code = NotFound desc = could not find container \"0963c6c1df39ea1ae72e428e60b76ab3c5677d293f2fc2dd2f19bf3807a0dd6e\": container with ID starting with 0963c6c1df39ea1ae72e428e60b76ab3c5677d293f2fc2dd2f19bf3807a0dd6e not found: ID does not exist" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.322985 4663 scope.go:117] "RemoveContainer" containerID="352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.323150 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e"} err="failed to get container status \"352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e\": rpc error: code = NotFound desc = could not find container \"352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e\": container with ID starting with 352da67b62d882148c86d01610019b6c7908a62700effe248432aeb0dd13b29e not found: ID does not exist" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.323166 4663 scope.go:117] "RemoveContainer" containerID="f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.323367 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15"} err="failed to get container status \"f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15\": rpc error: code = NotFound desc = could not find container \"f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15\": container with ID starting with 
f9a09bb981db693d1ab725d2fa76cba7b59600a96b888e1a2b4f64e5af5b2b15 not found: ID does not exist" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.323391 4663 scope.go:117] "RemoveContainer" containerID="84fd59d33b64d1211bfe2754377534ed00ca078356eeb674fcd5d2d25761500b" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.323627 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"84fd59d33b64d1211bfe2754377534ed00ca078356eeb674fcd5d2d25761500b"} err="failed to get container status \"84fd59d33b64d1211bfe2754377534ed00ca078356eeb674fcd5d2d25761500b\": rpc error: code = NotFound desc = could not find container \"84fd59d33b64d1211bfe2754377534ed00ca078356eeb674fcd5d2d25761500b\": container with ID starting with 84fd59d33b64d1211bfe2754377534ed00ca078356eeb674fcd5d2d25761500b not found: ID does not exist" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.323644 4663 scope.go:117] "RemoveContainer" containerID="ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.323824 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e"} err="failed to get container status \"ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e\": rpc error: code = NotFound desc = could not find container \"ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e\": container with ID starting with ad34867d7624a689cd263981b4d8631b3074470ece99eec2188eaf1cda70205e not found: ID does not exist" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.323839 4663 scope.go:117] "RemoveContainer" containerID="1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.323999 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522"} err="failed to get container status \"1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522\": rpc error: code = NotFound desc = could not find container \"1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522\": container with ID starting with 1d7a5d21f8ebe7691fa3f9e986248e9a5831fb3c1d4ba294ebce482fb58ad522 not found: ID does not exist" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.324016 4663 scope.go:117] "RemoveContainer" containerID="430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.324247 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729"} err="failed to get container status \"430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729\": rpc error: code = NotFound desc = could not find container \"430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729\": container with ID starting with 430533893b94d07695686a015d3445c93ce424e7296a959cc57b16f1c8774729 not found: ID does not exist" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.324264 4663 scope.go:117] "RemoveContainer" containerID="fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.324556 4663 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024"} err="failed to get container status \"fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024\": rpc error: code = NotFound desc = could not find container \"fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024\": container with ID starting with fcad57dcbe6bf2a4f54ad9c3adfeceae411639f52b718f02cb3831c57a54b024 not found: ID does not exist" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.324574 4663 scope.go:117] "RemoveContainer" containerID="848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.324766 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022"} err="failed to get container status \"848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\": rpc error: code = NotFound desc = could not find container \"848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022\": container with ID starting with 848d151aad28329f0e8e5ef53c8d254e5ddb35d6423a38a530c96cde057e4022 not found: ID does not exist" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.324796 4663 scope.go:117] "RemoveContainer" containerID="e5e3fa7340c49bbb65191e085175c9957df030d106c3870d12c90958223a80d6" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.325094 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e5e3fa7340c49bbb65191e085175c9957df030d106c3870d12c90958223a80d6"} err="failed to get container status \"e5e3fa7340c49bbb65191e085175c9957df030d106c3870d12c90958223a80d6\": rpc error: code = NotFound desc = could not find container \"e5e3fa7340c49bbb65191e085175c9957df030d106c3870d12c90958223a80d6\": container with ID starting with e5e3fa7340c49bbb65191e085175c9957df030d106c3870d12c90958223a80d6 not found: ID does not exist" Dec 11 10:28:23 crc kubenswrapper[4663]: I1211 10:28:23.510374 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f739f0f-4546-4db5-809d-435823288bd8" path="/var/lib/kubelet/pods/4f739f0f-4546-4db5-809d-435823288bd8/volumes" Dec 11 10:28:24 crc kubenswrapper[4663]: I1211 10:28:24.157910 4663 generic.go:334] "Generic (PLEG): container finished" podID="f7d5d1e6-1d08-4440-b532-35503b93f22e" containerID="1287a6f1d241c1e1be34bfb98097696676b91d02a0e884cd146a56e4f81020a8" exitCode=0 Dec 11 10:28:24 crc kubenswrapper[4663]: I1211 10:28:24.157984 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" event={"ID":"f7d5d1e6-1d08-4440-b532-35503b93f22e","Type":"ContainerDied","Data":"1287a6f1d241c1e1be34bfb98097696676b91d02a0e884cd146a56e4f81020a8"} Dec 11 10:28:24 crc kubenswrapper[4663]: I1211 10:28:24.158215 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" event={"ID":"f7d5d1e6-1d08-4440-b532-35503b93f22e","Type":"ContainerStarted","Data":"431fbf2b85736342a17ec6a6d263cfd65337cc0104f98988f3f7d133c376c153"} Dec 11 10:28:24 crc kubenswrapper[4663]: I1211 10:28:24.158231 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" event={"ID":"f7d5d1e6-1d08-4440-b532-35503b93f22e","Type":"ContainerStarted","Data":"d8fff4a293f4ad69a0ce8e5994bc0039f8cfb51e5a615d15191a55a196b736b6"} Dec 11 10:28:24 crc kubenswrapper[4663]: I1211 10:28:24.158240 4663 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" event={"ID":"f7d5d1e6-1d08-4440-b532-35503b93f22e","Type":"ContainerStarted","Data":"450efe72446c045d033efdef273d96e3da2500cbe6bed5973bfd361e3092718b"} Dec 11 10:28:24 crc kubenswrapper[4663]: I1211 10:28:24.158248 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" event={"ID":"f7d5d1e6-1d08-4440-b532-35503b93f22e","Type":"ContainerStarted","Data":"323cb811c220abfe043b97ded03d68d69f23dd29ff7d642786b1dd23ac652100"} Dec 11 10:28:24 crc kubenswrapper[4663]: I1211 10:28:24.158258 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" event={"ID":"f7d5d1e6-1d08-4440-b532-35503b93f22e","Type":"ContainerStarted","Data":"7c3c72481468bf5852ae8618b9e646cc36fef05cff1aac5e428139a16968a4c1"} Dec 11 10:28:24 crc kubenswrapper[4663]: I1211 10:28:24.158266 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" event={"ID":"f7d5d1e6-1d08-4440-b532-35503b93f22e","Type":"ContainerStarted","Data":"755ce70252571d9e49167d6853f1d085a65c043550f81f9f08347b6ecc210154"} Dec 11 10:28:26 crc kubenswrapper[4663]: I1211 10:28:26.168668 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" event={"ID":"f7d5d1e6-1d08-4440-b532-35503b93f22e","Type":"ContainerStarted","Data":"eb3936e323f8952e0a1f0033c94b922176628ccc8e478c20ff36ad36781dbae0"} Dec 11 10:28:28 crc kubenswrapper[4663]: I1211 10:28:28.178811 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" event={"ID":"f7d5d1e6-1d08-4440-b532-35503b93f22e","Type":"ContainerStarted","Data":"dd0da704fa28230c62f1bb0e8708cb145e6ee4a9a8b95065ae64859461e1b3ac"} Dec 11 10:28:28 crc kubenswrapper[4663]: I1211 10:28:28.179432 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" Dec 11 10:28:28 crc kubenswrapper[4663]: I1211 10:28:28.179445 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" Dec 11 10:28:28 crc kubenswrapper[4663]: I1211 10:28:28.179453 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" Dec 11 10:28:28 crc kubenswrapper[4663]: I1211 10:28:28.199084 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" Dec 11 10:28:28 crc kubenswrapper[4663]: I1211 10:28:28.201739 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" Dec 11 10:28:28 crc kubenswrapper[4663]: I1211 10:28:28.201738 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" podStartSLOduration=6.201724677 podStartE2EDuration="6.201724677s" podCreationTimestamp="2025-12-11 10:28:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:28:28.19999421 +0000 UTC m=+639.017923994" watchObservedRunningTime="2025-12-11 10:28:28.201724677 +0000 UTC m=+639.019654461" Dec 11 10:28:34 crc kubenswrapper[4663]: I1211 10:28:34.505246 4663 scope.go:117] "RemoveContainer" containerID="bbef5c59739cbf708221920dc1d3eba9725c081535cabff905d9eddf68fda315" Dec 11 10:28:34 crc kubenswrapper[4663]: E1211 
10:28:34.506382 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-qv7jl_openshift-multus(43cbb02f-aa8f-41aa-bd0a-8042aca3002c)\"" pod="openshift-multus/multus-qv7jl" podUID="43cbb02f-aa8f-41aa-bd0a-8042aca3002c" Dec 11 10:28:43 crc kubenswrapper[4663]: I1211 10:28:43.102251 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5"] Dec 11 10:28:43 crc kubenswrapper[4663]: I1211 10:28:43.103635 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5" Dec 11 10:28:43 crc kubenswrapper[4663]: I1211 10:28:43.106446 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 11 10:28:43 crc kubenswrapper[4663]: I1211 10:28:43.109000 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5"] Dec 11 10:28:43 crc kubenswrapper[4663]: I1211 10:28:43.155786 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a5d1796b-fb33-4fc8-9f29-f3847c874051-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5\" (UID: \"a5d1796b-fb33-4fc8-9f29-f3847c874051\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5" Dec 11 10:28:43 crc kubenswrapper[4663]: I1211 10:28:43.155853 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a5d1796b-fb33-4fc8-9f29-f3847c874051-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5\" (UID: \"a5d1796b-fb33-4fc8-9f29-f3847c874051\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5" Dec 11 10:28:43 crc kubenswrapper[4663]: I1211 10:28:43.155917 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ll8d4\" (UniqueName: \"kubernetes.io/projected/a5d1796b-fb33-4fc8-9f29-f3847c874051-kube-api-access-ll8d4\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5\" (UID: \"a5d1796b-fb33-4fc8-9f29-f3847c874051\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5" Dec 11 10:28:43 crc kubenswrapper[4663]: I1211 10:28:43.256959 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ll8d4\" (UniqueName: \"kubernetes.io/projected/a5d1796b-fb33-4fc8-9f29-f3847c874051-kube-api-access-ll8d4\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5\" (UID: \"a5d1796b-fb33-4fc8-9f29-f3847c874051\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5" Dec 11 10:28:43 crc kubenswrapper[4663]: I1211 10:28:43.257014 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a5d1796b-fb33-4fc8-9f29-f3847c874051-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5\" (UID: \"a5d1796b-fb33-4fc8-9f29-f3847c874051\") " 
pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5" Dec 11 10:28:43 crc kubenswrapper[4663]: I1211 10:28:43.257045 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a5d1796b-fb33-4fc8-9f29-f3847c874051-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5\" (UID: \"a5d1796b-fb33-4fc8-9f29-f3847c874051\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5" Dec 11 10:28:43 crc kubenswrapper[4663]: I1211 10:28:43.257398 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a5d1796b-fb33-4fc8-9f29-f3847c874051-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5\" (UID: \"a5d1796b-fb33-4fc8-9f29-f3847c874051\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5" Dec 11 10:28:43 crc kubenswrapper[4663]: I1211 10:28:43.257536 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a5d1796b-fb33-4fc8-9f29-f3847c874051-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5\" (UID: \"a5d1796b-fb33-4fc8-9f29-f3847c874051\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5" Dec 11 10:28:43 crc kubenswrapper[4663]: I1211 10:28:43.272725 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ll8d4\" (UniqueName: \"kubernetes.io/projected/a5d1796b-fb33-4fc8-9f29-f3847c874051-kube-api-access-ll8d4\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5\" (UID: \"a5d1796b-fb33-4fc8-9f29-f3847c874051\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5" Dec 11 10:28:43 crc kubenswrapper[4663]: I1211 10:28:43.419100 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5" Dec 11 10:28:43 crc kubenswrapper[4663]: E1211 10:28:43.443617 4663 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5_openshift-marketplace_a5d1796b-fb33-4fc8-9f29-f3847c874051_0(97af05a0924eba22c31242e5f69e6cab94aa84f69d92970f6d718d49c0f53354): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 11 10:28:43 crc kubenswrapper[4663]: E1211 10:28:43.443675 4663 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5_openshift-marketplace_a5d1796b-fb33-4fc8-9f29-f3847c874051_0(97af05a0924eba22c31242e5f69e6cab94aa84f69d92970f6d718d49c0f53354): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5" Dec 11 10:28:43 crc kubenswrapper[4663]: E1211 10:28:43.443694 4663 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5_openshift-marketplace_a5d1796b-fb33-4fc8-9f29-f3847c874051_0(97af05a0924eba22c31242e5f69e6cab94aa84f69d92970f6d718d49c0f53354): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5" Dec 11 10:28:43 crc kubenswrapper[4663]: E1211 10:28:43.443735 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5_openshift-marketplace(a5d1796b-fb33-4fc8-9f29-f3847c874051)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5_openshift-marketplace(a5d1796b-fb33-4fc8-9f29-f3847c874051)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5_openshift-marketplace_a5d1796b-fb33-4fc8-9f29-f3847c874051_0(97af05a0924eba22c31242e5f69e6cab94aa84f69d92970f6d718d49c0f53354): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5" podUID="a5d1796b-fb33-4fc8-9f29-f3847c874051" Dec 11 10:28:44 crc kubenswrapper[4663]: I1211 10:28:44.239350 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5" Dec 11 10:28:44 crc kubenswrapper[4663]: I1211 10:28:44.240248 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5" Dec 11 10:28:44 crc kubenswrapper[4663]: E1211 10:28:44.257028 4663 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5_openshift-marketplace_a5d1796b-fb33-4fc8-9f29-f3847c874051_0(144a0aa973fccde8775bcb66cf1724a404a69c93b3581329c6f5530754ddf056): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 11 10:28:44 crc kubenswrapper[4663]: E1211 10:28:44.257091 4663 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5_openshift-marketplace_a5d1796b-fb33-4fc8-9f29-f3847c874051_0(144a0aa973fccde8775bcb66cf1724a404a69c93b3581329c6f5530754ddf056): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5" Dec 11 10:28:44 crc kubenswrapper[4663]: E1211 10:28:44.257114 4663 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5_openshift-marketplace_a5d1796b-fb33-4fc8-9f29-f3847c874051_0(144a0aa973fccde8775bcb66cf1724a404a69c93b3581329c6f5530754ddf056): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5" Dec 11 10:28:44 crc kubenswrapper[4663]: E1211 10:28:44.257163 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5_openshift-marketplace(a5d1796b-fb33-4fc8-9f29-f3847c874051)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5_openshift-marketplace(a5d1796b-fb33-4fc8-9f29-f3847c874051)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5_openshift-marketplace_a5d1796b-fb33-4fc8-9f29-f3847c874051_0(144a0aa973fccde8775bcb66cf1724a404a69c93b3581329c6f5530754ddf056): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5" podUID="a5d1796b-fb33-4fc8-9f29-f3847c874051" Dec 11 10:28:46 crc kubenswrapper[4663]: I1211 10:28:46.504505 4663 scope.go:117] "RemoveContainer" containerID="bbef5c59739cbf708221920dc1d3eba9725c081535cabff905d9eddf68fda315" Dec 11 10:28:47 crc kubenswrapper[4663]: I1211 10:28:47.251591 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-qv7jl_43cbb02f-aa8f-41aa-bd0a-8042aca3002c/kube-multus/2.log" Dec 11 10:28:47 crc kubenswrapper[4663]: I1211 10:28:47.252193 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-qv7jl_43cbb02f-aa8f-41aa-bd0a-8042aca3002c/kube-multus/1.log" Dec 11 10:28:47 crc kubenswrapper[4663]: I1211 10:28:47.252232 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-qv7jl" event={"ID":"43cbb02f-aa8f-41aa-bd0a-8042aca3002c","Type":"ContainerStarted","Data":"44acc4eb961a29f7702c8ec90c6b55777339732f7a24609a0c3b9717f9d345d3"} Dec 11 10:28:49 crc kubenswrapper[4663]: I1211 10:28:49.674078 4663 scope.go:117] "RemoveContainer" containerID="60bc0fe12622361f71172b9c28087be13fe7c722952d22af2982c439d19c7a73" Dec 11 10:28:50 crc kubenswrapper[4663]: I1211 10:28:50.263684 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-qv7jl_43cbb02f-aa8f-41aa-bd0a-8042aca3002c/kube-multus/2.log" Dec 11 10:28:53 crc kubenswrapper[4663]: I1211 10:28:53.069417 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-s72j9" Dec 11 10:28:59 crc kubenswrapper[4663]: I1211 10:28:59.506242 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5" Dec 11 10:28:59 crc kubenswrapper[4663]: I1211 10:28:59.506905 4663 util.go:30] "No sandbox for pod can be found. 
Dec 11 10:28:59 crc kubenswrapper[4663]: I1211 10:28:59.506905 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5"
Dec 11 10:28:59 crc kubenswrapper[4663]: I1211 10:28:59.839545 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5"]
Dec 11 10:29:00 crc kubenswrapper[4663]: I1211 10:29:00.307956 4663 generic.go:334] "Generic (PLEG): container finished" podID="a5d1796b-fb33-4fc8-9f29-f3847c874051" containerID="f11bb1aa092b8ffdfeb3aad104995ac063bd495ca2b84e1ed43d1c48ac42c848" exitCode=0
Dec 11 10:29:00 crc kubenswrapper[4663]: I1211 10:29:00.308118 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5" event={"ID":"a5d1796b-fb33-4fc8-9f29-f3847c874051","Type":"ContainerDied","Data":"f11bb1aa092b8ffdfeb3aad104995ac063bd495ca2b84e1ed43d1c48ac42c848"}
Dec 11 10:29:00 crc kubenswrapper[4663]: I1211 10:29:00.308268 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5" event={"ID":"a5d1796b-fb33-4fc8-9f29-f3847c874051","Type":"ContainerStarted","Data":"f74ca8ac64ee412fe89f5cf9abb5323c4e7f859ac76c2e2bc278baa4de00ded9"}
Dec 11 10:29:02 crc kubenswrapper[4663]: I1211 10:29:02.317054 4663 generic.go:334] "Generic (PLEG): container finished" podID="a5d1796b-fb33-4fc8-9f29-f3847c874051" containerID="339f173d5540485bed3ac4dc02d62f61a49db28d74d829c9ee9aaa2b135b5d6e" exitCode=0
Dec 11 10:29:02 crc kubenswrapper[4663]: I1211 10:29:02.317097 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5" event={"ID":"a5d1796b-fb33-4fc8-9f29-f3847c874051","Type":"ContainerDied","Data":"339f173d5540485bed3ac4dc02d62f61a49db28d74d829c9ee9aaa2b135b5d6e"}
Dec 11 10:29:03 crc kubenswrapper[4663]: I1211 10:29:03.323201 4663 generic.go:334] "Generic (PLEG): container finished" podID="a5d1796b-fb33-4fc8-9f29-f3847c874051" containerID="44a9116def5d12f631bfae4980b8136b92631ddb7834de9247e237db95ed2f21" exitCode=0
Dec 11 10:29:03 crc kubenswrapper[4663]: I1211 10:29:03.323237 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5" event={"ID":"a5d1796b-fb33-4fc8-9f29-f3847c874051","Type":"ContainerDied","Data":"44a9116def5d12f631bfae4980b8136b92631ddb7834de9247e237db95ed2f21"}
Dec 11 10:29:04 crc kubenswrapper[4663]: I1211 10:29:04.489211 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5"
Dec 11 10:29:04 crc kubenswrapper[4663]: I1211 10:29:04.586124 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a5d1796b-fb33-4fc8-9f29-f3847c874051-util\") pod \"a5d1796b-fb33-4fc8-9f29-f3847c874051\" (UID: \"a5d1796b-fb33-4fc8-9f29-f3847c874051\") "
Dec 11 10:29:04 crc kubenswrapper[4663]: I1211 10:29:04.586174 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a5d1796b-fb33-4fc8-9f29-f3847c874051-bundle\") pod \"a5d1796b-fb33-4fc8-9f29-f3847c874051\" (UID: \"a5d1796b-fb33-4fc8-9f29-f3847c874051\") "
Dec 11 10:29:04 crc kubenswrapper[4663]: I1211 10:29:04.586275 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ll8d4\" (UniqueName: \"kubernetes.io/projected/a5d1796b-fb33-4fc8-9f29-f3847c874051-kube-api-access-ll8d4\") pod \"a5d1796b-fb33-4fc8-9f29-f3847c874051\" (UID: \"a5d1796b-fb33-4fc8-9f29-f3847c874051\") "
Dec 11 10:29:04 crc kubenswrapper[4663]: I1211 10:29:04.587983 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a5d1796b-fb33-4fc8-9f29-f3847c874051-bundle" (OuterVolumeSpecName: "bundle") pod "a5d1796b-fb33-4fc8-9f29-f3847c874051" (UID: "a5d1796b-fb33-4fc8-9f29-f3847c874051"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 11 10:29:04 crc kubenswrapper[4663]: I1211 10:29:04.591241 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a5d1796b-fb33-4fc8-9f29-f3847c874051-kube-api-access-ll8d4" (OuterVolumeSpecName: "kube-api-access-ll8d4") pod "a5d1796b-fb33-4fc8-9f29-f3847c874051" (UID: "a5d1796b-fb33-4fc8-9f29-f3847c874051"). InnerVolumeSpecName "kube-api-access-ll8d4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 11 10:29:04 crc kubenswrapper[4663]: I1211 10:29:04.595756 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a5d1796b-fb33-4fc8-9f29-f3847c874051-util" (OuterVolumeSpecName: "util") pod "a5d1796b-fb33-4fc8-9f29-f3847c874051" (UID: "a5d1796b-fb33-4fc8-9f29-f3847c874051"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 11 10:29:04 crc kubenswrapper[4663]: I1211 10:29:04.687350 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ll8d4\" (UniqueName: \"kubernetes.io/projected/a5d1796b-fb33-4fc8-9f29-f3847c874051-kube-api-access-ll8d4\") on node \"crc\" DevicePath \"\""
Dec 11 10:29:04 crc kubenswrapper[4663]: I1211 10:29:04.687448 4663 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a5d1796b-fb33-4fc8-9f29-f3847c874051-util\") on node \"crc\" DevicePath \"\""
Dec 11 10:29:04 crc kubenswrapper[4663]: I1211 10:29:04.687513 4663 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a5d1796b-fb33-4fc8-9f29-f3847c874051-bundle\") on node \"crc\" DevicePath \"\""
Dec 11 10:29:05 crc kubenswrapper[4663]: I1211 10:29:05.332742 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5" event={"ID":"a5d1796b-fb33-4fc8-9f29-f3847c874051","Type":"ContainerDied","Data":"f74ca8ac64ee412fe89f5cf9abb5323c4e7f859ac76c2e2bc278baa4de00ded9"}
Dec 11 10:29:05 crc kubenswrapper[4663]: I1211 10:29:05.333020 4663 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f74ca8ac64ee412fe89f5cf9abb5323c4e7f859ac76c2e2bc278baa4de00ded9"
Dec 11 10:29:05 crc kubenswrapper[4663]: I1211 10:29:05.332792 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5"
Dec 11 10:29:15 crc kubenswrapper[4663]: I1211 10:29:15.484429 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-vp4bg"]
Dec 11 10:29:15 crc kubenswrapper[4663]: E1211 10:29:15.484884 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5d1796b-fb33-4fc8-9f29-f3847c874051" containerName="extract"
Dec 11 10:29:15 crc kubenswrapper[4663]: I1211 10:29:15.484897 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5d1796b-fb33-4fc8-9f29-f3847c874051" containerName="extract"
Dec 11 10:29:15 crc kubenswrapper[4663]: E1211 10:29:15.484908 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5d1796b-fb33-4fc8-9f29-f3847c874051" containerName="pull"
Dec 11 10:29:15 crc kubenswrapper[4663]: I1211 10:29:15.484913 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5d1796b-fb33-4fc8-9f29-f3847c874051" containerName="pull"
Dec 11 10:29:15 crc kubenswrapper[4663]: E1211 10:29:15.484924 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5d1796b-fb33-4fc8-9f29-f3847c874051" containerName="util"
Dec 11 10:29:15 crc kubenswrapper[4663]: I1211 10:29:15.484930 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5d1796b-fb33-4fc8-9f29-f3847c874051" containerName="util"
Dec 11 10:29:15 crc kubenswrapper[4663]: I1211 10:29:15.485026 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5d1796b-fb33-4fc8-9f29-f3847c874051" containerName="extract"
Dec 11 10:29:15 crc kubenswrapper[4663]: I1211 10:29:15.485396 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-vp4bg"
Dec 11 10:29:15 crc kubenswrapper[4663]: I1211 10:29:15.487392 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt"
Dec 11 10:29:15 crc kubenswrapper[4663]: I1211 10:29:15.487411 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt"
Dec 11 10:29:15 crc kubenswrapper[4663]: I1211 10:29:15.487480 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-85j27"
Dec 11 10:29:15 crc kubenswrapper[4663]: I1211 10:29:15.494814 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-vp4bg"]
Dec 11 10:29:15 crc kubenswrapper[4663]: I1211 10:29:15.597749 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-7dfbd95878-6zjcl"]
Dec 11 10:29:15 crc kubenswrapper[4663]: I1211 10:29:15.598405 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7dfbd95878-6zjcl"
Dec 11 10:29:15 crc kubenswrapper[4663]: I1211 10:29:15.600055 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-f22nq"
Dec 11 10:29:15 crc kubenswrapper[4663]: I1211 10:29:15.600069 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert"
Dec 11 10:29:15 crc kubenswrapper[4663]: I1211 10:29:15.608952 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-7dfbd95878-92svc"]
Dec 11 10:29:15 crc kubenswrapper[4663]: I1211 10:29:15.609553 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7dfbd95878-92svc"
Dec 11 10:29:15 crc kubenswrapper[4663]: I1211 10:29:15.612131 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-7dfbd95878-6zjcl"]
Dec 11 10:29:15 crc kubenswrapper[4663]: I1211 10:29:15.614460 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xvwcw\" (UniqueName: \"kubernetes.io/projected/cbb769f1-696a-4afa-8ef4-a3d230a07eda-kube-api-access-xvwcw\") pod \"obo-prometheus-operator-668cf9dfbb-vp4bg\" (UID: \"cbb769f1-696a-4afa-8ef4-a3d230a07eda\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-vp4bg"
Dec 11 10:29:15 crc kubenswrapper[4663]: I1211 10:29:15.632836 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-7dfbd95878-92svc"]
Dec 11 10:29:15 crc kubenswrapper[4663]: I1211 10:29:15.712519 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-cm44b"]
Dec 11 10:29:15 crc kubenswrapper[4663]: I1211 10:29:15.713172 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-cm44b"
Dec 11 10:29:15 crc kubenswrapper[4663]: W1211 10:29:15.714759 4663 reflector.go:561] object-"openshift-operators"/"observability-operator-tls": failed to list *v1.Secret: secrets "observability-operator-tls" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-operators": no relationship found between node 'crc' and this object
Dec 11 10:29:15 crc kubenswrapper[4663]: E1211 10:29:15.714802 4663 reflector.go:158] "Unhandled Error" err="object-\"openshift-operators\"/\"observability-operator-tls\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"observability-operator-tls\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-operators\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Dec 11 10:29:15 crc kubenswrapper[4663]: W1211 10:29:15.715244 4663 reflector.go:561] object-"openshift-operators"/"observability-operator-sa-dockercfg-2qgkp": failed to list *v1.Secret: secrets "observability-operator-sa-dockercfg-2qgkp" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-operators": no relationship found between node 'crc' and this object
Dec 11 10:29:15 crc kubenswrapper[4663]: E1211 10:29:15.715270 4663 reflector.go:158] "Unhandled Error" err="object-\"openshift-operators\"/\"observability-operator-sa-dockercfg-2qgkp\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"observability-operator-sa-dockercfg-2qgkp\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-operators\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Dec 11 10:29:15 crc kubenswrapper[4663]: I1211 10:29:15.715282 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/4cafe8bf-4173-4b4a-b883-07b7450f9d92-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-7dfbd95878-92svc\" (UID: \"4cafe8bf-4173-4b4a-b883-07b7450f9d92\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7dfbd95878-92svc"
Dec 11 10:29:15 crc kubenswrapper[4663]: I1211 10:29:15.715318 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xvwcw\" (UniqueName: \"kubernetes.io/projected/cbb769f1-696a-4afa-8ef4-a3d230a07eda-kube-api-access-xvwcw\") pod \"obo-prometheus-operator-668cf9dfbb-vp4bg\" (UID: \"cbb769f1-696a-4afa-8ef4-a3d230a07eda\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-vp4bg"
Dec 11 10:29:15 crc kubenswrapper[4663]: I1211 10:29:15.715341 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/a1318112-f771-48fb-8023-5e494d6dc382-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-7dfbd95878-6zjcl\" (UID: \"a1318112-f771-48fb-8023-5e494d6dc382\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7dfbd95878-6zjcl"
Dec 11 10:29:15 crc kubenswrapper[4663]: I1211 10:29:15.715380 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/4cafe8bf-4173-4b4a-b883-07b7450f9d92-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-7dfbd95878-92svc\" (UID: \"4cafe8bf-4173-4b4a-b883-07b7450f9d92\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7dfbd95878-92svc"
Dec 11 10:29:15 crc kubenswrapper[4663]: I1211 10:29:15.715403 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/a1318112-f771-48fb-8023-5e494d6dc382-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-7dfbd95878-6zjcl\" (UID: \"a1318112-f771-48fb-8023-5e494d6dc382\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7dfbd95878-6zjcl"
Dec 11 10:29:15 crc kubenswrapper[4663]: I1211 10:29:15.735548 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-cm44b"]
Dec 11 10:29:15 crc kubenswrapper[4663]: I1211 10:29:15.745768 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xvwcw\" (UniqueName: \"kubernetes.io/projected/cbb769f1-696a-4afa-8ef4-a3d230a07eda-kube-api-access-xvwcw\") pod \"obo-prometheus-operator-668cf9dfbb-vp4bg\" (UID: \"cbb769f1-696a-4afa-8ef4-a3d230a07eda\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-vp4bg"
Dec 11 10:29:15 crc kubenswrapper[4663]: I1211 10:29:15.799640 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-vp4bg"
Dec 11 10:29:15 crc kubenswrapper[4663]: I1211 10:29:15.801039 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-5446b9c989-4vkfs"]
Dec 11 10:29:15 crc kubenswrapper[4663]: I1211 10:29:15.801623 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-4vkfs"
Dec 11 10:29:15 crc kubenswrapper[4663]: I1211 10:29:15.803266 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-29hth"
Dec 11 10:29:15 crc kubenswrapper[4663]: I1211 10:29:15.811513 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5446b9c989-4vkfs"]
Dec 11 10:29:15 crc kubenswrapper[4663]: I1211 10:29:15.821524 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d4gzh\" (UniqueName: \"kubernetes.io/projected/6be7c78d-a792-4392-b915-507ca6867772-kube-api-access-d4gzh\") pod \"observability-operator-d8bb48f5d-cm44b\" (UID: \"6be7c78d-a792-4392-b915-507ca6867772\") " pod="openshift-operators/observability-operator-d8bb48f5d-cm44b"
Dec 11 10:29:15 crc kubenswrapper[4663]: I1211 10:29:15.821575 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/6be7c78d-a792-4392-b915-507ca6867772-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-cm44b\" (UID: \"6be7c78d-a792-4392-b915-507ca6867772\") " pod="openshift-operators/observability-operator-d8bb48f5d-cm44b"
Dec 11 10:29:15 crc kubenswrapper[4663]: I1211 10:29:15.821608 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/4cafe8bf-4173-4b4a-b883-07b7450f9d92-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-7dfbd95878-92svc\" (UID: \"4cafe8bf-4173-4b4a-b883-07b7450f9d92\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7dfbd95878-92svc"
Dec 11 10:29:15 crc kubenswrapper[4663]: I1211 10:29:15.821629 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/a1318112-f771-48fb-8023-5e494d6dc382-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-7dfbd95878-6zjcl\" (UID: \"a1318112-f771-48fb-8023-5e494d6dc382\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7dfbd95878-6zjcl"
Dec 11 10:29:15 crc kubenswrapper[4663]: I1211 10:29:15.821662 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/4cafe8bf-4173-4b4a-b883-07b7450f9d92-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-7dfbd95878-92svc\" (UID: \"4cafe8bf-4173-4b4a-b883-07b7450f9d92\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7dfbd95878-92svc"
Dec 11 10:29:15 crc kubenswrapper[4663]: I1211 10:29:15.821682 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/a1318112-f771-48fb-8023-5e494d6dc382-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-7dfbd95878-6zjcl\" (UID: \"a1318112-f771-48fb-8023-5e494d6dc382\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7dfbd95878-6zjcl"
Dec 11 10:29:15 crc kubenswrapper[4663]: I1211 10:29:15.825427 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/4cafe8bf-4173-4b4a-b883-07b7450f9d92-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-7dfbd95878-92svc\" (UID: \"4cafe8bf-4173-4b4a-b883-07b7450f9d92\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7dfbd95878-92svc"
Dec 11 10:29:15 crc kubenswrapper[4663]: I1211 10:29:15.831507 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/a1318112-f771-48fb-8023-5e494d6dc382-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-7dfbd95878-6zjcl\" (UID: \"a1318112-f771-48fb-8023-5e494d6dc382\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7dfbd95878-6zjcl"
Dec 11 10:29:15 crc kubenswrapper[4663]: I1211 10:29:15.835330 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/a1318112-f771-48fb-8023-5e494d6dc382-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-7dfbd95878-6zjcl\" (UID: \"a1318112-f771-48fb-8023-5e494d6dc382\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7dfbd95878-6zjcl"
Dec 11 10:29:15 crc kubenswrapper[4663]: I1211 10:29:15.836353 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/4cafe8bf-4173-4b4a-b883-07b7450f9d92-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-7dfbd95878-92svc\" (UID: \"4cafe8bf-4173-4b4a-b883-07b7450f9d92\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7dfbd95878-92svc"
Dec 11 10:29:15 crc kubenswrapper[4663]: I1211 10:29:15.910460 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7dfbd95878-6zjcl"
Dec 11 10:29:15 crc kubenswrapper[4663]: I1211 10:29:15.921121 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7dfbd95878-92svc"
Dec 11 10:29:15 crc kubenswrapper[4663]: I1211 10:29:15.922389 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d4gzh\" (UniqueName: \"kubernetes.io/projected/6be7c78d-a792-4392-b915-507ca6867772-kube-api-access-d4gzh\") pod \"observability-operator-d8bb48f5d-cm44b\" (UID: \"6be7c78d-a792-4392-b915-507ca6867772\") " pod="openshift-operators/observability-operator-d8bb48f5d-cm44b"
Dec 11 10:29:15 crc kubenswrapper[4663]: I1211 10:29:15.922446 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mtzcr\" (UniqueName: \"kubernetes.io/projected/1c9993fc-c5ec-49fe-a68a-4da3fe9a8ac9-kube-api-access-mtzcr\") pod \"perses-operator-5446b9c989-4vkfs\" (UID: \"1c9993fc-c5ec-49fe-a68a-4da3fe9a8ac9\") " pod="openshift-operators/perses-operator-5446b9c989-4vkfs"
Dec 11 10:29:15 crc kubenswrapper[4663]: I1211 10:29:15.922468 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/1c9993fc-c5ec-49fe-a68a-4da3fe9a8ac9-openshift-service-ca\") pod \"perses-operator-5446b9c989-4vkfs\" (UID: \"1c9993fc-c5ec-49fe-a68a-4da3fe9a8ac9\") " pod="openshift-operators/perses-operator-5446b9c989-4vkfs"
Dec 11 10:29:15 crc kubenswrapper[4663]: I1211 10:29:15.922485 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/6be7c78d-a792-4392-b915-507ca6867772-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-cm44b\" (UID: \"6be7c78d-a792-4392-b915-507ca6867772\") " pod="openshift-operators/observability-operator-d8bb48f5d-cm44b"
Dec 11 10:29:15 crc kubenswrapper[4663]: I1211 10:29:15.940276 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d4gzh\" (UniqueName: \"kubernetes.io/projected/6be7c78d-a792-4392-b915-507ca6867772-kube-api-access-d4gzh\") pod \"observability-operator-d8bb48f5d-cm44b\" (UID: \"6be7c78d-a792-4392-b915-507ca6867772\") " pod="openshift-operators/observability-operator-d8bb48f5d-cm44b"
Dec 11 10:29:16 crc kubenswrapper[4663]: I1211 10:29:16.024217 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mtzcr\" (UniqueName: \"kubernetes.io/projected/1c9993fc-c5ec-49fe-a68a-4da3fe9a8ac9-kube-api-access-mtzcr\") pod \"perses-operator-5446b9c989-4vkfs\" (UID: \"1c9993fc-c5ec-49fe-a68a-4da3fe9a8ac9\") " pod="openshift-operators/perses-operator-5446b9c989-4vkfs"
Dec 11 10:29:16 crc kubenswrapper[4663]: I1211 10:29:16.024442 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/1c9993fc-c5ec-49fe-a68a-4da3fe9a8ac9-openshift-service-ca\") pod \"perses-operator-5446b9c989-4vkfs\" (UID: \"1c9993fc-c5ec-49fe-a68a-4da3fe9a8ac9\") " pod="openshift-operators/perses-operator-5446b9c989-4vkfs"
Dec 11 10:29:16 crc kubenswrapper[4663]: I1211 10:29:16.025568 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/1c9993fc-c5ec-49fe-a68a-4da3fe9a8ac9-openshift-service-ca\") pod \"perses-operator-5446b9c989-4vkfs\" (UID: \"1c9993fc-c5ec-49fe-a68a-4da3fe9a8ac9\") " pod="openshift-operators/perses-operator-5446b9c989-4vkfs"
Dec 11 10:29:16 crc kubenswrapper[4663]: I1211 10:29:16.040642 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mtzcr\" (UniqueName: \"kubernetes.io/projected/1c9993fc-c5ec-49fe-a68a-4da3fe9a8ac9-kube-api-access-mtzcr\") pod \"perses-operator-5446b9c989-4vkfs\" (UID: \"1c9993fc-c5ec-49fe-a68a-4da3fe9a8ac9\") " pod="openshift-operators/perses-operator-5446b9c989-4vkfs"
Dec 11 10:29:16 crc kubenswrapper[4663]: I1211 10:29:16.041232 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-vp4bg"]
Dec 11 10:29:16 crc kubenswrapper[4663]: I1211 10:29:16.126550 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-7dfbd95878-6zjcl"]
Dec 11 10:29:16 crc kubenswrapper[4663]: W1211 10:29:16.135327 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda1318112_f771_48fb_8023_5e494d6dc382.slice/crio-31a76207e557403502ce0911d8ae7defe69e23e1936eaec94b3ff177cfaa98a4 WatchSource:0}: Error finding container 31a76207e557403502ce0911d8ae7defe69e23e1936eaec94b3ff177cfaa98a4: Status 404 returned error can't find the container with id 31a76207e557403502ce0911d8ae7defe69e23e1936eaec94b3ff177cfaa98a4
Dec 11 10:29:16 crc kubenswrapper[4663]: I1211 10:29:16.169923 4663 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-4vkfs" Dec 11 10:29:16 crc kubenswrapper[4663]: I1211 10:29:16.302633 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-7dfbd95878-92svc"] Dec 11 10:29:16 crc kubenswrapper[4663]: W1211 10:29:16.320824 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4cafe8bf_4173_4b4a_b883_07b7450f9d92.slice/crio-0346ad69b07846f547637dd59d495421adbad1f9734a94e8ebea8c58e23943a8 WatchSource:0}: Error finding container 0346ad69b07846f547637dd59d495421adbad1f9734a94e8ebea8c58e23943a8: Status 404 returned error can't find the container with id 0346ad69b07846f547637dd59d495421adbad1f9734a94e8ebea8c58e23943a8 Dec 11 10:29:16 crc kubenswrapper[4663]: I1211 10:29:16.369609 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5446b9c989-4vkfs"] Dec 11 10:29:16 crc kubenswrapper[4663]: W1211 10:29:16.376887 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1c9993fc_c5ec_49fe_a68a_4da3fe9a8ac9.slice/crio-5f53d8c5abf6b225dfaef94e1e7c13a908c56d60715c002567ca029da10cb384 WatchSource:0}: Error finding container 5f53d8c5abf6b225dfaef94e1e7c13a908c56d60715c002567ca029da10cb384: Status 404 returned error can't find the container with id 5f53d8c5abf6b225dfaef94e1e7c13a908c56d60715c002567ca029da10cb384 Dec 11 10:29:16 crc kubenswrapper[4663]: I1211 10:29:16.377573 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-vp4bg" event={"ID":"cbb769f1-696a-4afa-8ef4-a3d230a07eda","Type":"ContainerStarted","Data":"d71094f0513b37f5d32dbd91af1a61a231ea1e327b3de9f2a0b0fff60452f59a"} Dec 11 10:29:16 crc kubenswrapper[4663]: I1211 10:29:16.379674 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7dfbd95878-6zjcl" event={"ID":"a1318112-f771-48fb-8023-5e494d6dc382","Type":"ContainerStarted","Data":"31a76207e557403502ce0911d8ae7defe69e23e1936eaec94b3ff177cfaa98a4"} Dec 11 10:29:16 crc kubenswrapper[4663]: I1211 10:29:16.381261 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7dfbd95878-92svc" event={"ID":"4cafe8bf-4173-4b4a-b883-07b7450f9d92","Type":"ContainerStarted","Data":"0346ad69b07846f547637dd59d495421adbad1f9734a94e8ebea8c58e23943a8"} Dec 11 10:29:16 crc kubenswrapper[4663]: I1211 10:29:16.551385 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls" Dec 11 10:29:16 crc kubenswrapper[4663]: I1211 10:29:16.559022 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/6be7c78d-a792-4392-b915-507ca6867772-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-cm44b\" (UID: \"6be7c78d-a792-4392-b915-507ca6867772\") " pod="openshift-operators/observability-operator-d8bb48f5d-cm44b" Dec 11 10:29:17 crc kubenswrapper[4663]: I1211 10:29:17.108768 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-2qgkp" Dec 11 10:29:17 crc kubenswrapper[4663]: I1211 10:29:17.116028 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-cm44b" Dec 11 10:29:17 crc kubenswrapper[4663]: I1211 10:29:17.319400 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-cm44b"] Dec 11 10:29:17 crc kubenswrapper[4663]: W1211 10:29:17.329036 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6be7c78d_a792_4392_b915_507ca6867772.slice/crio-04d64a0e382a6927e61564abf2323d9dde3edc6001c2e428b4d221f9d4b92dc1 WatchSource:0}: Error finding container 04d64a0e382a6927e61564abf2323d9dde3edc6001c2e428b4d221f9d4b92dc1: Status 404 returned error can't find the container with id 04d64a0e382a6927e61564abf2323d9dde3edc6001c2e428b4d221f9d4b92dc1 Dec 11 10:29:17 crc kubenswrapper[4663]: I1211 10:29:17.387144 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-d8bb48f5d-cm44b" event={"ID":"6be7c78d-a792-4392-b915-507ca6867772","Type":"ContainerStarted","Data":"04d64a0e382a6927e61564abf2323d9dde3edc6001c2e428b4d221f9d4b92dc1"} Dec 11 10:29:17 crc kubenswrapper[4663]: I1211 10:29:17.388145 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5446b9c989-4vkfs" event={"ID":"1c9993fc-c5ec-49fe-a68a-4da3fe9a8ac9","Type":"ContainerStarted","Data":"5f53d8c5abf6b225dfaef94e1e7c13a908c56d60715c002567ca029da10cb384"} Dec 11 10:29:26 crc kubenswrapper[4663]: I1211 10:29:26.460532 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7dfbd95878-6zjcl" event={"ID":"a1318112-f771-48fb-8023-5e494d6dc382","Type":"ContainerStarted","Data":"963e54e2e853e94b4a2ad871841c9705c6ae245fcf30163cf83833bedbfc00f5"} Dec 11 10:29:26 crc kubenswrapper[4663]: I1211 10:29:26.461703 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7dfbd95878-92svc" event={"ID":"4cafe8bf-4173-4b4a-b883-07b7450f9d92","Type":"ContainerStarted","Data":"fac4fad0b73ccc60f7296238b8c6dbe51fdbe6f2ee3d6a5a4c46db700349b11c"} Dec 11 10:29:26 crc kubenswrapper[4663]: I1211 10:29:26.463281 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-d8bb48f5d-cm44b" event={"ID":"6be7c78d-a792-4392-b915-507ca6867772","Type":"ContainerStarted","Data":"93c109752ef7e3f1956d52455763a10d26f0f04d6113af06b5735f534f508967"} Dec 11 10:29:26 crc kubenswrapper[4663]: I1211 10:29:26.463447 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-d8bb48f5d-cm44b" Dec 11 10:29:26 crc kubenswrapper[4663]: I1211 10:29:26.464485 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-vp4bg" event={"ID":"cbb769f1-696a-4afa-8ef4-a3d230a07eda","Type":"ContainerStarted","Data":"c4dafda5cc342373030319de3e19821b93df06ecdf4fb9171930f28941967682"} Dec 11 10:29:26 crc kubenswrapper[4663]: I1211 10:29:26.465078 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/observability-operator-d8bb48f5d-cm44b" Dec 11 10:29:26 crc kubenswrapper[4663]: I1211 10:29:26.465826 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5446b9c989-4vkfs" 
event={"ID":"1c9993fc-c5ec-49fe-a68a-4da3fe9a8ac9","Type":"ContainerStarted","Data":"9ebe06fd2557a17482e870849b6f35f31d851f71c9ccdb452f3b4e4542f30ea5"} Dec 11 10:29:26 crc kubenswrapper[4663]: I1211 10:29:26.465947 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-5446b9c989-4vkfs" Dec 11 10:29:26 crc kubenswrapper[4663]: I1211 10:29:26.474422 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7dfbd95878-6zjcl" podStartSLOduration=2.250579033 podStartE2EDuration="11.474411153s" podCreationTimestamp="2025-12-11 10:29:15 +0000 UTC" firstStartedPulling="2025-12-11 10:29:16.142894566 +0000 UTC m=+686.960824350" lastFinishedPulling="2025-12-11 10:29:25.366726686 +0000 UTC m=+696.184656470" observedRunningTime="2025-12-11 10:29:26.473723839 +0000 UTC m=+697.291653623" watchObservedRunningTime="2025-12-11 10:29:26.474411153 +0000 UTC m=+697.292340936" Dec 11 10:29:26 crc kubenswrapper[4663]: I1211 10:29:26.490278 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7dfbd95878-92svc" podStartSLOduration=2.459627883 podStartE2EDuration="11.490264596s" podCreationTimestamp="2025-12-11 10:29:15 +0000 UTC" firstStartedPulling="2025-12-11 10:29:16.335571456 +0000 UTC m=+687.153501241" lastFinishedPulling="2025-12-11 10:29:25.366208171 +0000 UTC m=+696.184137954" observedRunningTime="2025-12-11 10:29:26.486803232 +0000 UTC m=+697.304733015" watchObservedRunningTime="2025-12-11 10:29:26.490264596 +0000 UTC m=+697.308194380" Dec 11 10:29:26 crc kubenswrapper[4663]: I1211 10:29:26.510394 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-d8bb48f5d-cm44b" podStartSLOduration=3.466212828 podStartE2EDuration="11.510381642s" podCreationTimestamp="2025-12-11 10:29:15 +0000 UTC" firstStartedPulling="2025-12-11 10:29:17.330896544 +0000 UTC m=+688.148826328" lastFinishedPulling="2025-12-11 10:29:25.375065358 +0000 UTC m=+696.192995142" observedRunningTime="2025-12-11 10:29:26.506337912 +0000 UTC m=+697.324267696" watchObservedRunningTime="2025-12-11 10:29:26.510381642 +0000 UTC m=+697.328311426" Dec 11 10:29:26 crc kubenswrapper[4663]: I1211 10:29:26.536772 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-vp4bg" podStartSLOduration=2.239909694 podStartE2EDuration="11.536758448s" podCreationTimestamp="2025-12-11 10:29:15 +0000 UTC" firstStartedPulling="2025-12-11 10:29:16.055620704 +0000 UTC m=+686.873550487" lastFinishedPulling="2025-12-11 10:29:25.352469456 +0000 UTC m=+696.170399241" observedRunningTime="2025-12-11 10:29:26.534846841 +0000 UTC m=+697.352776624" watchObservedRunningTime="2025-12-11 10:29:26.536758448 +0000 UTC m=+697.354688221" Dec 11 10:29:26 crc kubenswrapper[4663]: I1211 10:29:26.538942 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/perses-operator-5446b9c989-4vkfs" podStartSLOduration=2.564588125 podStartE2EDuration="11.538933531s" podCreationTimestamp="2025-12-11 10:29:15 +0000 UTC" firstStartedPulling="2025-12-11 10:29:16.378815883 +0000 UTC m=+687.196745667" lastFinishedPulling="2025-12-11 10:29:25.353161289 +0000 UTC m=+696.171091073" observedRunningTime="2025-12-11 10:29:26.523265106 +0000 UTC m=+697.341194900" watchObservedRunningTime="2025-12-11 
10:29:26.538933531 +0000 UTC m=+697.356863315" Dec 11 10:29:36 crc kubenswrapper[4663]: I1211 10:29:36.172209 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-5446b9c989-4vkfs" Dec 11 10:29:49 crc kubenswrapper[4663]: I1211 10:29:49.197400 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8vn4wk"] Dec 11 10:29:49 crc kubenswrapper[4663]: I1211 10:29:49.198567 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8vn4wk" Dec 11 10:29:49 crc kubenswrapper[4663]: I1211 10:29:49.200744 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 11 10:29:49 crc kubenswrapper[4663]: I1211 10:29:49.202821 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8vn4wk"] Dec 11 10:29:49 crc kubenswrapper[4663]: I1211 10:29:49.270911 4663 patch_prober.go:28] interesting pod/machine-config-daemon-mq524 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 10:29:49 crc kubenswrapper[4663]: I1211 10:29:49.270974 4663 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 10:29:49 crc kubenswrapper[4663]: I1211 10:29:49.363508 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0229fac6-4edc-4051-aab4-b2d74321e50e-util\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8vn4wk\" (UID: \"0229fac6-4edc-4051-aab4-b2d74321e50e\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8vn4wk" Dec 11 10:29:49 crc kubenswrapper[4663]: I1211 10:29:49.363576 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0229fac6-4edc-4051-aab4-b2d74321e50e-bundle\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8vn4wk\" (UID: \"0229fac6-4edc-4051-aab4-b2d74321e50e\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8vn4wk" Dec 11 10:29:49 crc kubenswrapper[4663]: I1211 10:29:49.363636 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xpcgf\" (UniqueName: \"kubernetes.io/projected/0229fac6-4edc-4051-aab4-b2d74321e50e-kube-api-access-xpcgf\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8vn4wk\" (UID: \"0229fac6-4edc-4051-aab4-b2d74321e50e\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8vn4wk" Dec 11 10:29:49 crc kubenswrapper[4663]: I1211 10:29:49.465245 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0229fac6-4edc-4051-aab4-b2d74321e50e-bundle\") pod 
\"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8vn4wk\" (UID: \"0229fac6-4edc-4051-aab4-b2d74321e50e\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8vn4wk" Dec 11 10:29:49 crc kubenswrapper[4663]: I1211 10:29:49.465324 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xpcgf\" (UniqueName: \"kubernetes.io/projected/0229fac6-4edc-4051-aab4-b2d74321e50e-kube-api-access-xpcgf\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8vn4wk\" (UID: \"0229fac6-4edc-4051-aab4-b2d74321e50e\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8vn4wk" Dec 11 10:29:49 crc kubenswrapper[4663]: I1211 10:29:49.465395 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0229fac6-4edc-4051-aab4-b2d74321e50e-util\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8vn4wk\" (UID: \"0229fac6-4edc-4051-aab4-b2d74321e50e\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8vn4wk" Dec 11 10:29:49 crc kubenswrapper[4663]: I1211 10:29:49.465681 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0229fac6-4edc-4051-aab4-b2d74321e50e-bundle\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8vn4wk\" (UID: \"0229fac6-4edc-4051-aab4-b2d74321e50e\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8vn4wk" Dec 11 10:29:49 crc kubenswrapper[4663]: I1211 10:29:49.465724 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0229fac6-4edc-4051-aab4-b2d74321e50e-util\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8vn4wk\" (UID: \"0229fac6-4edc-4051-aab4-b2d74321e50e\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8vn4wk" Dec 11 10:29:49 crc kubenswrapper[4663]: I1211 10:29:49.480089 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xpcgf\" (UniqueName: \"kubernetes.io/projected/0229fac6-4edc-4051-aab4-b2d74321e50e-kube-api-access-xpcgf\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8vn4wk\" (UID: \"0229fac6-4edc-4051-aab4-b2d74321e50e\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8vn4wk" Dec 11 10:29:49 crc kubenswrapper[4663]: I1211 10:29:49.512083 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 11 10:29:49 crc kubenswrapper[4663]: I1211 10:29:49.521404 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8vn4wk" Dec 11 10:29:49 crc kubenswrapper[4663]: I1211 10:29:49.859591 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8vn4wk"] Dec 11 10:29:49 crc kubenswrapper[4663]: W1211 10:29:49.863063 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0229fac6_4edc_4051_aab4_b2d74321e50e.slice/crio-dc55c8063828e3d22fedf26ffd10651a20e9791ba31487a271df709946772397 WatchSource:0}: Error finding container dc55c8063828e3d22fedf26ffd10651a20e9791ba31487a271df709946772397: Status 404 returned error can't find the container with id dc55c8063828e3d22fedf26ffd10651a20e9791ba31487a271df709946772397 Dec 11 10:29:50 crc kubenswrapper[4663]: I1211 10:29:50.564580 4663 generic.go:334] "Generic (PLEG): container finished" podID="0229fac6-4edc-4051-aab4-b2d74321e50e" containerID="565f3fa66c1026cd18bd7f83df1d411f8e76815dcf0c5c393377d2b79a1413a8" exitCode=0 Dec 11 10:29:50 crc kubenswrapper[4663]: I1211 10:29:50.564620 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8vn4wk" event={"ID":"0229fac6-4edc-4051-aab4-b2d74321e50e","Type":"ContainerDied","Data":"565f3fa66c1026cd18bd7f83df1d411f8e76815dcf0c5c393377d2b79a1413a8"} Dec 11 10:29:50 crc kubenswrapper[4663]: I1211 10:29:50.564773 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8vn4wk" event={"ID":"0229fac6-4edc-4051-aab4-b2d74321e50e","Type":"ContainerStarted","Data":"dc55c8063828e3d22fedf26ffd10651a20e9791ba31487a271df709946772397"} Dec 11 10:29:52 crc kubenswrapper[4663]: I1211 10:29:52.573799 4663 generic.go:334] "Generic (PLEG): container finished" podID="0229fac6-4edc-4051-aab4-b2d74321e50e" containerID="ffb33ec48a1ddde5c8cd76927c6c8712350f03b2f1cea0f0d7e13cc3ea1a84c4" exitCode=0 Dec 11 10:29:52 crc kubenswrapper[4663]: I1211 10:29:52.573871 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8vn4wk" event={"ID":"0229fac6-4edc-4051-aab4-b2d74321e50e","Type":"ContainerDied","Data":"ffb33ec48a1ddde5c8cd76927c6c8712350f03b2f1cea0f0d7e13cc3ea1a84c4"} Dec 11 10:29:53 crc kubenswrapper[4663]: I1211 10:29:53.580752 4663 generic.go:334] "Generic (PLEG): container finished" podID="0229fac6-4edc-4051-aab4-b2d74321e50e" containerID="38d8a9c5bb139a64461567bd63752c3c0c5813011ce6adc2ed5cc87fff3ddc83" exitCode=0 Dec 11 10:29:53 crc kubenswrapper[4663]: I1211 10:29:53.580786 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8vn4wk" event={"ID":"0229fac6-4edc-4051-aab4-b2d74321e50e","Type":"ContainerDied","Data":"38d8a9c5bb139a64461567bd63752c3c0c5813011ce6adc2ed5cc87fff3ddc83"} Dec 11 10:29:54 crc kubenswrapper[4663]: I1211 10:29:54.747620 4663 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8vn4wk" Dec 11 10:29:54 crc kubenswrapper[4663]: I1211 10:29:54.930566 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xpcgf\" (UniqueName: \"kubernetes.io/projected/0229fac6-4edc-4051-aab4-b2d74321e50e-kube-api-access-xpcgf\") pod \"0229fac6-4edc-4051-aab4-b2d74321e50e\" (UID: \"0229fac6-4edc-4051-aab4-b2d74321e50e\") " Dec 11 10:29:54 crc kubenswrapper[4663]: I1211 10:29:54.930649 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0229fac6-4edc-4051-aab4-b2d74321e50e-util\") pod \"0229fac6-4edc-4051-aab4-b2d74321e50e\" (UID: \"0229fac6-4edc-4051-aab4-b2d74321e50e\") " Dec 11 10:29:54 crc kubenswrapper[4663]: I1211 10:29:54.930683 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0229fac6-4edc-4051-aab4-b2d74321e50e-bundle\") pod \"0229fac6-4edc-4051-aab4-b2d74321e50e\" (UID: \"0229fac6-4edc-4051-aab4-b2d74321e50e\") " Dec 11 10:29:54 crc kubenswrapper[4663]: I1211 10:29:54.931221 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0229fac6-4edc-4051-aab4-b2d74321e50e-bundle" (OuterVolumeSpecName: "bundle") pod "0229fac6-4edc-4051-aab4-b2d74321e50e" (UID: "0229fac6-4edc-4051-aab4-b2d74321e50e"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:29:54 crc kubenswrapper[4663]: I1211 10:29:54.934710 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0229fac6-4edc-4051-aab4-b2d74321e50e-kube-api-access-xpcgf" (OuterVolumeSpecName: "kube-api-access-xpcgf") pod "0229fac6-4edc-4051-aab4-b2d74321e50e" (UID: "0229fac6-4edc-4051-aab4-b2d74321e50e"). InnerVolumeSpecName "kube-api-access-xpcgf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:29:54 crc kubenswrapper[4663]: I1211 10:29:54.940485 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0229fac6-4edc-4051-aab4-b2d74321e50e-util" (OuterVolumeSpecName: "util") pod "0229fac6-4edc-4051-aab4-b2d74321e50e" (UID: "0229fac6-4edc-4051-aab4-b2d74321e50e"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:29:55 crc kubenswrapper[4663]: I1211 10:29:55.031745 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xpcgf\" (UniqueName: \"kubernetes.io/projected/0229fac6-4edc-4051-aab4-b2d74321e50e-kube-api-access-xpcgf\") on node \"crc\" DevicePath \"\"" Dec 11 10:29:55 crc kubenswrapper[4663]: I1211 10:29:55.031770 4663 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0229fac6-4edc-4051-aab4-b2d74321e50e-util\") on node \"crc\" DevicePath \"\"" Dec 11 10:29:55 crc kubenswrapper[4663]: I1211 10:29:55.031781 4663 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0229fac6-4edc-4051-aab4-b2d74321e50e-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 10:29:55 crc kubenswrapper[4663]: I1211 10:29:55.589628 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8vn4wk" event={"ID":"0229fac6-4edc-4051-aab4-b2d74321e50e","Type":"ContainerDied","Data":"dc55c8063828e3d22fedf26ffd10651a20e9791ba31487a271df709946772397"} Dec 11 10:29:55 crc kubenswrapper[4663]: I1211 10:29:55.589664 4663 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dc55c8063828e3d22fedf26ffd10651a20e9791ba31487a271df709946772397" Dec 11 10:29:55 crc kubenswrapper[4663]: I1211 10:29:55.589695 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8vn4wk" Dec 11 10:30:00 crc kubenswrapper[4663]: I1211 10:30:00.133429 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424150-7g6k2"] Dec 11 10:30:00 crc kubenswrapper[4663]: E1211 10:30:00.133902 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0229fac6-4edc-4051-aab4-b2d74321e50e" containerName="pull" Dec 11 10:30:00 crc kubenswrapper[4663]: I1211 10:30:00.133914 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="0229fac6-4edc-4051-aab4-b2d74321e50e" containerName="pull" Dec 11 10:30:00 crc kubenswrapper[4663]: E1211 10:30:00.133927 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0229fac6-4edc-4051-aab4-b2d74321e50e" containerName="extract" Dec 11 10:30:00 crc kubenswrapper[4663]: I1211 10:30:00.133945 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="0229fac6-4edc-4051-aab4-b2d74321e50e" containerName="extract" Dec 11 10:30:00 crc kubenswrapper[4663]: E1211 10:30:00.133954 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0229fac6-4edc-4051-aab4-b2d74321e50e" containerName="util" Dec 11 10:30:00 crc kubenswrapper[4663]: I1211 10:30:00.133959 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="0229fac6-4edc-4051-aab4-b2d74321e50e" containerName="util" Dec 11 10:30:00 crc kubenswrapper[4663]: I1211 10:30:00.134059 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="0229fac6-4edc-4051-aab4-b2d74321e50e" containerName="extract" Dec 11 10:30:00 crc kubenswrapper[4663]: I1211 10:30:00.134417 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424150-7g6k2" Dec 11 10:30:00 crc kubenswrapper[4663]: I1211 10:30:00.135805 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 11 10:30:00 crc kubenswrapper[4663]: I1211 10:30:00.136136 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 11 10:30:00 crc kubenswrapper[4663]: I1211 10:30:00.141053 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424150-7g6k2"] Dec 11 10:30:00 crc kubenswrapper[4663]: I1211 10:30:00.289347 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/973e5483-5229-4985-bfe1-c5f4418565fc-config-volume\") pod \"collect-profiles-29424150-7g6k2\" (UID: \"973e5483-5229-4985-bfe1-c5f4418565fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424150-7g6k2" Dec 11 10:30:00 crc kubenswrapper[4663]: I1211 10:30:00.289491 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-md6l2\" (UniqueName: \"kubernetes.io/projected/973e5483-5229-4985-bfe1-c5f4418565fc-kube-api-access-md6l2\") pod \"collect-profiles-29424150-7g6k2\" (UID: \"973e5483-5229-4985-bfe1-c5f4418565fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424150-7g6k2" Dec 11 10:30:00 crc kubenswrapper[4663]: I1211 10:30:00.289554 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/973e5483-5229-4985-bfe1-c5f4418565fc-secret-volume\") pod \"collect-profiles-29424150-7g6k2\" (UID: \"973e5483-5229-4985-bfe1-c5f4418565fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424150-7g6k2" Dec 11 10:30:00 crc kubenswrapper[4663]: I1211 10:30:00.390870 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-md6l2\" (UniqueName: \"kubernetes.io/projected/973e5483-5229-4985-bfe1-c5f4418565fc-kube-api-access-md6l2\") pod \"collect-profiles-29424150-7g6k2\" (UID: \"973e5483-5229-4985-bfe1-c5f4418565fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424150-7g6k2" Dec 11 10:30:00 crc kubenswrapper[4663]: I1211 10:30:00.390912 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/973e5483-5229-4985-bfe1-c5f4418565fc-secret-volume\") pod \"collect-profiles-29424150-7g6k2\" (UID: \"973e5483-5229-4985-bfe1-c5f4418565fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424150-7g6k2" Dec 11 10:30:00 crc kubenswrapper[4663]: I1211 10:30:00.390979 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/973e5483-5229-4985-bfe1-c5f4418565fc-config-volume\") pod \"collect-profiles-29424150-7g6k2\" (UID: \"973e5483-5229-4985-bfe1-c5f4418565fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424150-7g6k2" Dec 11 10:30:00 crc kubenswrapper[4663]: I1211 10:30:00.391657 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/973e5483-5229-4985-bfe1-c5f4418565fc-config-volume\") pod 
\"collect-profiles-29424150-7g6k2\" (UID: \"973e5483-5229-4985-bfe1-c5f4418565fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424150-7g6k2" Dec 11 10:30:00 crc kubenswrapper[4663]: I1211 10:30:00.395448 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/973e5483-5229-4985-bfe1-c5f4418565fc-secret-volume\") pod \"collect-profiles-29424150-7g6k2\" (UID: \"973e5483-5229-4985-bfe1-c5f4418565fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424150-7g6k2" Dec 11 10:30:00 crc kubenswrapper[4663]: I1211 10:30:00.403916 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-md6l2\" (UniqueName: \"kubernetes.io/projected/973e5483-5229-4985-bfe1-c5f4418565fc-kube-api-access-md6l2\") pod \"collect-profiles-29424150-7g6k2\" (UID: \"973e5483-5229-4985-bfe1-c5f4418565fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424150-7g6k2" Dec 11 10:30:00 crc kubenswrapper[4663]: I1211 10:30:00.446826 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424150-7g6k2" Dec 11 10:30:00 crc kubenswrapper[4663]: I1211 10:30:00.491551 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-6769fb99d-rg6tv"] Dec 11 10:30:00 crc kubenswrapper[4663]: I1211 10:30:00.492160 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-6769fb99d-rg6tv" Dec 11 10:30:00 crc kubenswrapper[4663]: I1211 10:30:00.493260 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-crfqd" Dec 11 10:30:00 crc kubenswrapper[4663]: I1211 10:30:00.493448 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Dec 11 10:30:00 crc kubenswrapper[4663]: I1211 10:30:00.493483 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Dec 11 10:30:00 crc kubenswrapper[4663]: I1211 10:30:00.501736 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-6769fb99d-rg6tv"] Dec 11 10:30:00 crc kubenswrapper[4663]: I1211 10:30:00.593291 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dbfjg\" (UniqueName: \"kubernetes.io/projected/bc3b05ee-7cfe-4661-91fb-280a23f47129-kube-api-access-dbfjg\") pod \"nmstate-operator-6769fb99d-rg6tv\" (UID: \"bc3b05ee-7cfe-4661-91fb-280a23f47129\") " pod="openshift-nmstate/nmstate-operator-6769fb99d-rg6tv" Dec 11 10:30:00 crc kubenswrapper[4663]: I1211 10:30:00.694923 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dbfjg\" (UniqueName: \"kubernetes.io/projected/bc3b05ee-7cfe-4661-91fb-280a23f47129-kube-api-access-dbfjg\") pod \"nmstate-operator-6769fb99d-rg6tv\" (UID: \"bc3b05ee-7cfe-4661-91fb-280a23f47129\") " pod="openshift-nmstate/nmstate-operator-6769fb99d-rg6tv" Dec 11 10:30:00 crc kubenswrapper[4663]: I1211 10:30:00.709458 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dbfjg\" (UniqueName: \"kubernetes.io/projected/bc3b05ee-7cfe-4661-91fb-280a23f47129-kube-api-access-dbfjg\") pod \"nmstate-operator-6769fb99d-rg6tv\" (UID: \"bc3b05ee-7cfe-4661-91fb-280a23f47129\") " 
pod="openshift-nmstate/nmstate-operator-6769fb99d-rg6tv" Dec 11 10:30:00 crc kubenswrapper[4663]: I1211 10:30:00.804676 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-6769fb99d-rg6tv" Dec 11 10:30:00 crc kubenswrapper[4663]: I1211 10:30:00.813526 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424150-7g6k2"] Dec 11 10:30:01 crc kubenswrapper[4663]: I1211 10:30:01.137057 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-6769fb99d-rg6tv"] Dec 11 10:30:01 crc kubenswrapper[4663]: W1211 10:30:01.140320 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbc3b05ee_7cfe_4661_91fb_280a23f47129.slice/crio-4b0e3fefde5b30d292ab7b86e716a220ff87bbc188f6376ba6dbbc039139afa6 WatchSource:0}: Error finding container 4b0e3fefde5b30d292ab7b86e716a220ff87bbc188f6376ba6dbbc039139afa6: Status 404 returned error can't find the container with id 4b0e3fefde5b30d292ab7b86e716a220ff87bbc188f6376ba6dbbc039139afa6 Dec 11 10:30:01 crc kubenswrapper[4663]: I1211 10:30:01.612091 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-6769fb99d-rg6tv" event={"ID":"bc3b05ee-7cfe-4661-91fb-280a23f47129","Type":"ContainerStarted","Data":"4b0e3fefde5b30d292ab7b86e716a220ff87bbc188f6376ba6dbbc039139afa6"} Dec 11 10:30:01 crc kubenswrapper[4663]: I1211 10:30:01.613508 4663 generic.go:334] "Generic (PLEG): container finished" podID="973e5483-5229-4985-bfe1-c5f4418565fc" containerID="e93aa43da27dbb0157e1cf5bb57267f81618dca9e7e74c4dd6b60c3a7dab9369" exitCode=0 Dec 11 10:30:01 crc kubenswrapper[4663]: I1211 10:30:01.613541 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424150-7g6k2" event={"ID":"973e5483-5229-4985-bfe1-c5f4418565fc","Type":"ContainerDied","Data":"e93aa43da27dbb0157e1cf5bb57267f81618dca9e7e74c4dd6b60c3a7dab9369"} Dec 11 10:30:01 crc kubenswrapper[4663]: I1211 10:30:01.613577 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424150-7g6k2" event={"ID":"973e5483-5229-4985-bfe1-c5f4418565fc","Type":"ContainerStarted","Data":"29b9fb0dfc0ca50b37f439e182840bee246089f1017b40535d9cb392a649fc75"} Dec 11 10:30:02 crc kubenswrapper[4663]: I1211 10:30:02.789259 4663 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424150-7g6k2" Dec 11 10:30:02 crc kubenswrapper[4663]: I1211 10:30:02.920208 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/973e5483-5229-4985-bfe1-c5f4418565fc-secret-volume\") pod \"973e5483-5229-4985-bfe1-c5f4418565fc\" (UID: \"973e5483-5229-4985-bfe1-c5f4418565fc\") " Dec 11 10:30:02 crc kubenswrapper[4663]: I1211 10:30:02.920256 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/973e5483-5229-4985-bfe1-c5f4418565fc-config-volume\") pod \"973e5483-5229-4985-bfe1-c5f4418565fc\" (UID: \"973e5483-5229-4985-bfe1-c5f4418565fc\") " Dec 11 10:30:02 crc kubenswrapper[4663]: I1211 10:30:02.920347 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-md6l2\" (UniqueName: \"kubernetes.io/projected/973e5483-5229-4985-bfe1-c5f4418565fc-kube-api-access-md6l2\") pod \"973e5483-5229-4985-bfe1-c5f4418565fc\" (UID: \"973e5483-5229-4985-bfe1-c5f4418565fc\") " Dec 11 10:30:02 crc kubenswrapper[4663]: I1211 10:30:02.921037 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/973e5483-5229-4985-bfe1-c5f4418565fc-config-volume" (OuterVolumeSpecName: "config-volume") pod "973e5483-5229-4985-bfe1-c5f4418565fc" (UID: "973e5483-5229-4985-bfe1-c5f4418565fc"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:30:02 crc kubenswrapper[4663]: I1211 10:30:02.937504 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/973e5483-5229-4985-bfe1-c5f4418565fc-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "973e5483-5229-4985-bfe1-c5f4418565fc" (UID: "973e5483-5229-4985-bfe1-c5f4418565fc"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:30:02 crc kubenswrapper[4663]: I1211 10:30:02.937585 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/973e5483-5229-4985-bfe1-c5f4418565fc-kube-api-access-md6l2" (OuterVolumeSpecName: "kube-api-access-md6l2") pod "973e5483-5229-4985-bfe1-c5f4418565fc" (UID: "973e5483-5229-4985-bfe1-c5f4418565fc"). InnerVolumeSpecName "kube-api-access-md6l2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:30:03 crc kubenswrapper[4663]: I1211 10:30:03.021868 4663 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/973e5483-5229-4985-bfe1-c5f4418565fc-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 11 10:30:03 crc kubenswrapper[4663]: I1211 10:30:03.022066 4663 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/973e5483-5229-4985-bfe1-c5f4418565fc-config-volume\") on node \"crc\" DevicePath \"\"" Dec 11 10:30:03 crc kubenswrapper[4663]: I1211 10:30:03.022127 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-md6l2\" (UniqueName: \"kubernetes.io/projected/973e5483-5229-4985-bfe1-c5f4418565fc-kube-api-access-md6l2\") on node \"crc\" DevicePath \"\"" Dec 11 10:30:03 crc kubenswrapper[4663]: I1211 10:30:03.627619 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424150-7g6k2" event={"ID":"973e5483-5229-4985-bfe1-c5f4418565fc","Type":"ContainerDied","Data":"29b9fb0dfc0ca50b37f439e182840bee246089f1017b40535d9cb392a649fc75"} Dec 11 10:30:03 crc kubenswrapper[4663]: I1211 10:30:03.627653 4663 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="29b9fb0dfc0ca50b37f439e182840bee246089f1017b40535d9cb392a649fc75" Dec 11 10:30:03 crc kubenswrapper[4663]: I1211 10:30:03.627679 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424150-7g6k2" Dec 11 10:30:05 crc kubenswrapper[4663]: I1211 10:30:05.637304 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-6769fb99d-rg6tv" event={"ID":"bc3b05ee-7cfe-4661-91fb-280a23f47129","Type":"ContainerStarted","Data":"bf36a12f09f6cbbdb8ad42d81ebf83845d478e916a29161fd78a58b1c29db3ab"} Dec 11 10:30:05 crc kubenswrapper[4663]: I1211 10:30:05.648315 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-6769fb99d-rg6tv" podStartSLOduration=2.183704405 podStartE2EDuration="5.648300947s" podCreationTimestamp="2025-12-11 10:30:00 +0000 UTC" firstStartedPulling="2025-12-11 10:30:01.142221423 +0000 UTC m=+731.960151207" lastFinishedPulling="2025-12-11 10:30:04.606817964 +0000 UTC m=+735.424747749" observedRunningTime="2025-12-11 10:30:05.647309442 +0000 UTC m=+736.465239226" watchObservedRunningTime="2025-12-11 10:30:05.648300947 +0000 UTC m=+736.466230732" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.043991 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-7f7f7578db-xpdhc"] Dec 11 10:30:10 crc kubenswrapper[4663]: E1211 10:30:10.044421 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="973e5483-5229-4985-bfe1-c5f4418565fc" containerName="collect-profiles" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.044434 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="973e5483-5229-4985-bfe1-c5f4418565fc" containerName="collect-profiles" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.044531 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="973e5483-5229-4985-bfe1-c5f4418565fc" containerName="collect-profiles" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.044998 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f7f7578db-xpdhc" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.046800 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-cqvmr" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.052268 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-f8fb84555-jqmgx"] Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.052982 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-f8fb84555-jqmgx" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.056081 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f7f7578db-xpdhc"] Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.056301 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.071244 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-f8fb84555-jqmgx"] Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.098110 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-7hlpp"] Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.101452 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-7hlpp" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.155599 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6ff7998486-47k77"] Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.156263 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-47k77" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.158211 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.158730 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.159840 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-zpw2n" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.170916 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6ff7998486-47k77"] Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.206363 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gnjmb\" (UniqueName: \"kubernetes.io/projected/bf360f09-bc5b-4440-8985-adc5db394494-kube-api-access-gnjmb\") pod \"nmstate-webhook-f8fb84555-jqmgx\" (UID: \"bf360f09-bc5b-4440-8985-adc5db394494\") " pod="openshift-nmstate/nmstate-webhook-f8fb84555-jqmgx" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.206445 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xjt5q\" (UniqueName: \"kubernetes.io/projected/7a691467-080b-4036-b702-43977278b399-kube-api-access-xjt5q\") pod \"nmstate-handler-7hlpp\" (UID: \"7a691467-080b-4036-b702-43977278b399\") " pod="openshift-nmstate/nmstate-handler-7hlpp" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.206480 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n2c4r\" (UniqueName: \"kubernetes.io/projected/66a3bc5f-1b5b-432b-9ea2-b3ea5f9fc313-kube-api-access-n2c4r\") pod \"nmstate-metrics-7f7f7578db-xpdhc\" (UID: \"66a3bc5f-1b5b-432b-9ea2-b3ea5f9fc313\") " pod="openshift-nmstate/nmstate-metrics-7f7f7578db-xpdhc" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.206499 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/7a691467-080b-4036-b702-43977278b399-nmstate-lock\") pod \"nmstate-handler-7hlpp\" (UID: \"7a691467-080b-4036-b702-43977278b399\") " pod="openshift-nmstate/nmstate-handler-7hlpp" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.206985 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/7a691467-080b-4036-b702-43977278b399-ovs-socket\") pod \"nmstate-handler-7hlpp\" (UID: \"7a691467-080b-4036-b702-43977278b399\") " pod="openshift-nmstate/nmstate-handler-7hlpp" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.207029 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/bf360f09-bc5b-4440-8985-adc5db394494-tls-key-pair\") pod \"nmstate-webhook-f8fb84555-jqmgx\" (UID: \"bf360f09-bc5b-4440-8985-adc5db394494\") " pod="openshift-nmstate/nmstate-webhook-f8fb84555-jqmgx" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.207064 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/7a691467-080b-4036-b702-43977278b399-dbus-socket\") 
pod \"nmstate-handler-7hlpp\" (UID: \"7a691467-080b-4036-b702-43977278b399\") " pod="openshift-nmstate/nmstate-handler-7hlpp" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.308346 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xjt5q\" (UniqueName: \"kubernetes.io/projected/7a691467-080b-4036-b702-43977278b399-kube-api-access-xjt5q\") pod \"nmstate-handler-7hlpp\" (UID: \"7a691467-080b-4036-b702-43977278b399\") " pod="openshift-nmstate/nmstate-handler-7hlpp" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.308401 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n2c4r\" (UniqueName: \"kubernetes.io/projected/66a3bc5f-1b5b-432b-9ea2-b3ea5f9fc313-kube-api-access-n2c4r\") pod \"nmstate-metrics-7f7f7578db-xpdhc\" (UID: \"66a3bc5f-1b5b-432b-9ea2-b3ea5f9fc313\") " pod="openshift-nmstate/nmstate-metrics-7f7f7578db-xpdhc" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.308424 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/7a691467-080b-4036-b702-43977278b399-nmstate-lock\") pod \"nmstate-handler-7hlpp\" (UID: \"7a691467-080b-4036-b702-43977278b399\") " pod="openshift-nmstate/nmstate-handler-7hlpp" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.308447 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/7a691467-080b-4036-b702-43977278b399-ovs-socket\") pod \"nmstate-handler-7hlpp\" (UID: \"7a691467-080b-4036-b702-43977278b399\") " pod="openshift-nmstate/nmstate-handler-7hlpp" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.308471 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/bf360f09-bc5b-4440-8985-adc5db394494-tls-key-pair\") pod \"nmstate-webhook-f8fb84555-jqmgx\" (UID: \"bf360f09-bc5b-4440-8985-adc5db394494\") " pod="openshift-nmstate/nmstate-webhook-f8fb84555-jqmgx" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.308492 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/7a691467-080b-4036-b702-43977278b399-dbus-socket\") pod \"nmstate-handler-7hlpp\" (UID: \"7a691467-080b-4036-b702-43977278b399\") " pod="openshift-nmstate/nmstate-handler-7hlpp" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.308527 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wc27b\" (UniqueName: \"kubernetes.io/projected/ca2f1c81-8dbc-4d92-95ef-c70df44a0801-kube-api-access-wc27b\") pod \"nmstate-console-plugin-6ff7998486-47k77\" (UID: \"ca2f1c81-8dbc-4d92-95ef-c70df44a0801\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-47k77" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.308556 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gnjmb\" (UniqueName: \"kubernetes.io/projected/bf360f09-bc5b-4440-8985-adc5db394494-kube-api-access-gnjmb\") pod \"nmstate-webhook-f8fb84555-jqmgx\" (UID: \"bf360f09-bc5b-4440-8985-adc5db394494\") " pod="openshift-nmstate/nmstate-webhook-f8fb84555-jqmgx" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.308583 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/ca2f1c81-8dbc-4d92-95ef-c70df44a0801-plugin-serving-cert\") pod \"nmstate-console-plugin-6ff7998486-47k77\" (UID: \"ca2f1c81-8dbc-4d92-95ef-c70df44a0801\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-47k77" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.308600 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/ca2f1c81-8dbc-4d92-95ef-c70df44a0801-nginx-conf\") pod \"nmstate-console-plugin-6ff7998486-47k77\" (UID: \"ca2f1c81-8dbc-4d92-95ef-c70df44a0801\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-47k77" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.309347 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/7a691467-080b-4036-b702-43977278b399-nmstate-lock\") pod \"nmstate-handler-7hlpp\" (UID: \"7a691467-080b-4036-b702-43977278b399\") " pod="openshift-nmstate/nmstate-handler-7hlpp" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.309378 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/7a691467-080b-4036-b702-43977278b399-ovs-socket\") pod \"nmstate-handler-7hlpp\" (UID: \"7a691467-080b-4036-b702-43977278b399\") " pod="openshift-nmstate/nmstate-handler-7hlpp" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.310314 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/7a691467-080b-4036-b702-43977278b399-dbus-socket\") pod \"nmstate-handler-7hlpp\" (UID: \"7a691467-080b-4036-b702-43977278b399\") " pod="openshift-nmstate/nmstate-handler-7hlpp" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.315169 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-7cc96b59bf-9bdh8"] Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.315631 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/bf360f09-bc5b-4440-8985-adc5db394494-tls-key-pair\") pod \"nmstate-webhook-f8fb84555-jqmgx\" (UID: \"bf360f09-bc5b-4440-8985-adc5db394494\") " pod="openshift-nmstate/nmstate-webhook-f8fb84555-jqmgx" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.315785 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-7cc96b59bf-9bdh8" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.325748 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xjt5q\" (UniqueName: \"kubernetes.io/projected/7a691467-080b-4036-b702-43977278b399-kube-api-access-xjt5q\") pod \"nmstate-handler-7hlpp\" (UID: \"7a691467-080b-4036-b702-43977278b399\") " pod="openshift-nmstate/nmstate-handler-7hlpp" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.333777 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n2c4r\" (UniqueName: \"kubernetes.io/projected/66a3bc5f-1b5b-432b-9ea2-b3ea5f9fc313-kube-api-access-n2c4r\") pod \"nmstate-metrics-7f7f7578db-xpdhc\" (UID: \"66a3bc5f-1b5b-432b-9ea2-b3ea5f9fc313\") " pod="openshift-nmstate/nmstate-metrics-7f7f7578db-xpdhc" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.335852 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gnjmb\" (UniqueName: \"kubernetes.io/projected/bf360f09-bc5b-4440-8985-adc5db394494-kube-api-access-gnjmb\") pod \"nmstate-webhook-f8fb84555-jqmgx\" (UID: \"bf360f09-bc5b-4440-8985-adc5db394494\") " pod="openshift-nmstate/nmstate-webhook-f8fb84555-jqmgx" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.343484 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-7cc96b59bf-9bdh8"] Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.362195 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f7f7578db-xpdhc" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.368541 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-f8fb84555-jqmgx" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.410258 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/ca2f1c81-8dbc-4d92-95ef-c70df44a0801-plugin-serving-cert\") pod \"nmstate-console-plugin-6ff7998486-47k77\" (UID: \"ca2f1c81-8dbc-4d92-95ef-c70df44a0801\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-47k77" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.410298 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/ca2f1c81-8dbc-4d92-95ef-c70df44a0801-nginx-conf\") pod \"nmstate-console-plugin-6ff7998486-47k77\" (UID: \"ca2f1c81-8dbc-4d92-95ef-c70df44a0801\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-47k77" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.410325 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z8cf2\" (UniqueName: \"kubernetes.io/projected/c1bcb873-ea08-4017-b4c9-d52f48def720-kube-api-access-z8cf2\") pod \"console-7cc96b59bf-9bdh8\" (UID: \"c1bcb873-ea08-4017-b4c9-d52f48def720\") " pod="openshift-console/console-7cc96b59bf-9bdh8" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.410352 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/c1bcb873-ea08-4017-b4c9-d52f48def720-console-config\") pod \"console-7cc96b59bf-9bdh8\" (UID: \"c1bcb873-ea08-4017-b4c9-d52f48def720\") " pod="openshift-console/console-7cc96b59bf-9bdh8" Dec 11 10:30:10 crc kubenswrapper[4663]: 
Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.410408 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c1bcb873-ea08-4017-b4c9-d52f48def720-trusted-ca-bundle\") pod \"console-7cc96b59bf-9bdh8\" (UID: \"c1bcb873-ea08-4017-b4c9-d52f48def720\") " pod="openshift-console/console-7cc96b59bf-9bdh8"
Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.410430 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/c1bcb873-ea08-4017-b4c9-d52f48def720-console-oauth-config\") pod \"console-7cc96b59bf-9bdh8\" (UID: \"c1bcb873-ea08-4017-b4c9-d52f48def720\") " pod="openshift-console/console-7cc96b59bf-9bdh8"
Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.410458 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/c1bcb873-ea08-4017-b4c9-d52f48def720-service-ca\") pod \"console-7cc96b59bf-9bdh8\" (UID: \"c1bcb873-ea08-4017-b4c9-d52f48def720\") " pod="openshift-console/console-7cc96b59bf-9bdh8"
Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.410473 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/c1bcb873-ea08-4017-b4c9-d52f48def720-console-serving-cert\") pod \"console-7cc96b59bf-9bdh8\" (UID: \"c1bcb873-ea08-4017-b4c9-d52f48def720\") " pod="openshift-console/console-7cc96b59bf-9bdh8"
Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.410509 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wc27b\" (UniqueName: \"kubernetes.io/projected/ca2f1c81-8dbc-4d92-95ef-c70df44a0801-kube-api-access-wc27b\") pod \"nmstate-console-plugin-6ff7998486-47k77\" (UID: \"ca2f1c81-8dbc-4d92-95ef-c70df44a0801\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-47k77"
Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.411611 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/ca2f1c81-8dbc-4d92-95ef-c70df44a0801-nginx-conf\") pod \"nmstate-console-plugin-6ff7998486-47k77\" (UID: \"ca2f1c81-8dbc-4d92-95ef-c70df44a0801\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-47k77"
Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.413766 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/ca2f1c81-8dbc-4d92-95ef-c70df44a0801-plugin-serving-cert\") pod \"nmstate-console-plugin-6ff7998486-47k77\" (UID: \"ca2f1c81-8dbc-4d92-95ef-c70df44a0801\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-47k77"
Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.416566 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-7hlpp"
Need to start a new one" pod="openshift-nmstate/nmstate-handler-7hlpp" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.425852 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wc27b\" (UniqueName: \"kubernetes.io/projected/ca2f1c81-8dbc-4d92-95ef-c70df44a0801-kube-api-access-wc27b\") pod \"nmstate-console-plugin-6ff7998486-47k77\" (UID: \"ca2f1c81-8dbc-4d92-95ef-c70df44a0801\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-47k77" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.468077 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-47k77" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.512021 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z8cf2\" (UniqueName: \"kubernetes.io/projected/c1bcb873-ea08-4017-b4c9-d52f48def720-kube-api-access-z8cf2\") pod \"console-7cc96b59bf-9bdh8\" (UID: \"c1bcb873-ea08-4017-b4c9-d52f48def720\") " pod="openshift-console/console-7cc96b59bf-9bdh8" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.512064 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/c1bcb873-ea08-4017-b4c9-d52f48def720-console-config\") pod \"console-7cc96b59bf-9bdh8\" (UID: \"c1bcb873-ea08-4017-b4c9-d52f48def720\") " pod="openshift-console/console-7cc96b59bf-9bdh8" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.512085 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/c1bcb873-ea08-4017-b4c9-d52f48def720-oauth-serving-cert\") pod \"console-7cc96b59bf-9bdh8\" (UID: \"c1bcb873-ea08-4017-b4c9-d52f48def720\") " pod="openshift-console/console-7cc96b59bf-9bdh8" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.512123 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c1bcb873-ea08-4017-b4c9-d52f48def720-trusted-ca-bundle\") pod \"console-7cc96b59bf-9bdh8\" (UID: \"c1bcb873-ea08-4017-b4c9-d52f48def720\") " pod="openshift-console/console-7cc96b59bf-9bdh8" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.512149 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/c1bcb873-ea08-4017-b4c9-d52f48def720-console-oauth-config\") pod \"console-7cc96b59bf-9bdh8\" (UID: \"c1bcb873-ea08-4017-b4c9-d52f48def720\") " pod="openshift-console/console-7cc96b59bf-9bdh8" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.512204 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/c1bcb873-ea08-4017-b4c9-d52f48def720-service-ca\") pod \"console-7cc96b59bf-9bdh8\" (UID: \"c1bcb873-ea08-4017-b4c9-d52f48def720\") " pod="openshift-console/console-7cc96b59bf-9bdh8" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.512222 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/c1bcb873-ea08-4017-b4c9-d52f48def720-console-serving-cert\") pod \"console-7cc96b59bf-9bdh8\" (UID: \"c1bcb873-ea08-4017-b4c9-d52f48def720\") " pod="openshift-console/console-7cc96b59bf-9bdh8" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.513091 4663 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/c1bcb873-ea08-4017-b4c9-d52f48def720-console-config\") pod \"console-7cc96b59bf-9bdh8\" (UID: \"c1bcb873-ea08-4017-b4c9-d52f48def720\") " pod="openshift-console/console-7cc96b59bf-9bdh8" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.513345 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/c1bcb873-ea08-4017-b4c9-d52f48def720-oauth-serving-cert\") pod \"console-7cc96b59bf-9bdh8\" (UID: \"c1bcb873-ea08-4017-b4c9-d52f48def720\") " pod="openshift-console/console-7cc96b59bf-9bdh8" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.513427 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c1bcb873-ea08-4017-b4c9-d52f48def720-trusted-ca-bundle\") pod \"console-7cc96b59bf-9bdh8\" (UID: \"c1bcb873-ea08-4017-b4c9-d52f48def720\") " pod="openshift-console/console-7cc96b59bf-9bdh8" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.513716 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/c1bcb873-ea08-4017-b4c9-d52f48def720-service-ca\") pod \"console-7cc96b59bf-9bdh8\" (UID: \"c1bcb873-ea08-4017-b4c9-d52f48def720\") " pod="openshift-console/console-7cc96b59bf-9bdh8" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.517054 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/c1bcb873-ea08-4017-b4c9-d52f48def720-console-serving-cert\") pod \"console-7cc96b59bf-9bdh8\" (UID: \"c1bcb873-ea08-4017-b4c9-d52f48def720\") " pod="openshift-console/console-7cc96b59bf-9bdh8" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.518524 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/c1bcb873-ea08-4017-b4c9-d52f48def720-console-oauth-config\") pod \"console-7cc96b59bf-9bdh8\" (UID: \"c1bcb873-ea08-4017-b4c9-d52f48def720\") " pod="openshift-console/console-7cc96b59bf-9bdh8" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.527896 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z8cf2\" (UniqueName: \"kubernetes.io/projected/c1bcb873-ea08-4017-b4c9-d52f48def720-kube-api-access-z8cf2\") pod \"console-7cc96b59bf-9bdh8\" (UID: \"c1bcb873-ea08-4017-b4c9-d52f48def720\") " pod="openshift-console/console-7cc96b59bf-9bdh8" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.605912 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6ff7998486-47k77"] Dec 11 10:30:10 crc kubenswrapper[4663]: W1211 10:30:10.609604 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podca2f1c81_8dbc_4d92_95ef_c70df44a0801.slice/crio-2e3a8edc25af2d1d8755313cca09a736c3ec85c820624ca541d648b0ef4492f5 WatchSource:0}: Error finding container 2e3a8edc25af2d1d8755313cca09a736c3ec85c820624ca541d648b0ef4492f5: Status 404 returned error can't find the container with id 2e3a8edc25af2d1d8755313cca09a736c3ec85c820624ca541d648b0ef4492f5 Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.659218 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-7hlpp" 
event={"ID":"7a691467-080b-4036-b702-43977278b399","Type":"ContainerStarted","Data":"e6105a113b1243924d36468eb3860cc3ff7ae37aa7c76530b20b999515f66e40"} Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.659974 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-47k77" event={"ID":"ca2f1c81-8dbc-4d92-95ef-c70df44a0801","Type":"ContainerStarted","Data":"2e3a8edc25af2d1d8755313cca09a736c3ec85c820624ca541d648b0ef4492f5"} Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.663378 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-7cc96b59bf-9bdh8" Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.733824 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f7f7578db-xpdhc"] Dec 11 10:30:10 crc kubenswrapper[4663]: W1211 10:30:10.740156 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod66a3bc5f_1b5b_432b_9ea2_b3ea5f9fc313.slice/crio-41ff886ccde778125d2d2ee5a41bf1149ef30dee6f881243a2c34847a7dae444 WatchSource:0}: Error finding container 41ff886ccde778125d2d2ee5a41bf1149ef30dee6f881243a2c34847a7dae444: Status 404 returned error can't find the container with id 41ff886ccde778125d2d2ee5a41bf1149ef30dee6f881243a2c34847a7dae444 Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.770007 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-f8fb84555-jqmgx"] Dec 11 10:30:10 crc kubenswrapper[4663]: W1211 10:30:10.774511 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbf360f09_bc5b_4440_8985_adc5db394494.slice/crio-cf2e72d16c4b87d29fd86df99fc2331e72b97450dc45c63c7ac5abf64401326c WatchSource:0}: Error finding container cf2e72d16c4b87d29fd86df99fc2331e72b97450dc45c63c7ac5abf64401326c: Status 404 returned error can't find the container with id cf2e72d16c4b87d29fd86df99fc2331e72b97450dc45c63c7ac5abf64401326c Dec 11 10:30:10 crc kubenswrapper[4663]: I1211 10:30:10.813201 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-7cc96b59bf-9bdh8"] Dec 11 10:30:10 crc kubenswrapper[4663]: W1211 10:30:10.817681 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc1bcb873_ea08_4017_b4c9_d52f48def720.slice/crio-b4900cb8b743c7ee16b7c9dcb7b539a48c342254bdff643fc670ba123a531783 WatchSource:0}: Error finding container b4900cb8b743c7ee16b7c9dcb7b539a48c342254bdff643fc670ba123a531783: Status 404 returned error can't find the container with id b4900cb8b743c7ee16b7c9dcb7b539a48c342254bdff643fc670ba123a531783 Dec 11 10:30:11 crc kubenswrapper[4663]: I1211 10:30:11.667710 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f7f7578db-xpdhc" event={"ID":"66a3bc5f-1b5b-432b-9ea2-b3ea5f9fc313","Type":"ContainerStarted","Data":"41ff886ccde778125d2d2ee5a41bf1149ef30dee6f881243a2c34847a7dae444"} Dec 11 10:30:11 crc kubenswrapper[4663]: I1211 10:30:11.669312 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-7cc96b59bf-9bdh8" event={"ID":"c1bcb873-ea08-4017-b4c9-d52f48def720","Type":"ContainerStarted","Data":"4b67c143c114929bc879e046c0c039522615e8a4b67c7879a17fea58aa9ea78b"} Dec 11 10:30:11 crc kubenswrapper[4663]: I1211 10:30:11.669395 4663 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-console/console-7cc96b59bf-9bdh8" event={"ID":"c1bcb873-ea08-4017-b4c9-d52f48def720","Type":"ContainerStarted","Data":"b4900cb8b743c7ee16b7c9dcb7b539a48c342254bdff643fc670ba123a531783"} Dec 11 10:30:11 crc kubenswrapper[4663]: I1211 10:30:11.670620 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-f8fb84555-jqmgx" event={"ID":"bf360f09-bc5b-4440-8985-adc5db394494","Type":"ContainerStarted","Data":"cf2e72d16c4b87d29fd86df99fc2331e72b97450dc45c63c7ac5abf64401326c"} Dec 11 10:30:11 crc kubenswrapper[4663]: I1211 10:30:11.686117 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-7cc96b59bf-9bdh8" podStartSLOduration=1.686096854 podStartE2EDuration="1.686096854s" podCreationTimestamp="2025-12-11 10:30:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:30:11.684322405 +0000 UTC m=+742.502252190" watchObservedRunningTime="2025-12-11 10:30:11.686096854 +0000 UTC m=+742.504026637" Dec 11 10:30:13 crc kubenswrapper[4663]: I1211 10:30:13.683487 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-f8fb84555-jqmgx" event={"ID":"bf360f09-bc5b-4440-8985-adc5db394494","Type":"ContainerStarted","Data":"999a77bcb16e3a4994c78566f1515a9f1094313a2513fc6d8f587c7d11282043"} Dec 11 10:30:13 crc kubenswrapper[4663]: I1211 10:30:13.683939 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-f8fb84555-jqmgx" Dec 11 10:30:13 crc kubenswrapper[4663]: I1211 10:30:13.685124 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-47k77" event={"ID":"ca2f1c81-8dbc-4d92-95ef-c70df44a0801","Type":"ContainerStarted","Data":"9cbe214e69b25585acbf16077f91191a70b5c3583b882ae1b8ff4668cda2c486"} Dec 11 10:30:13 crc kubenswrapper[4663]: I1211 10:30:13.686709 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-7hlpp" event={"ID":"7a691467-080b-4036-b702-43977278b399","Type":"ContainerStarted","Data":"22b8ff78ca9999a633e4adaa738648a4b2d742ae6a8c45b97796039618debb7c"} Dec 11 10:30:13 crc kubenswrapper[4663]: I1211 10:30:13.687175 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-7hlpp" Dec 11 10:30:13 crc kubenswrapper[4663]: I1211 10:30:13.688646 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f7f7578db-xpdhc" event={"ID":"66a3bc5f-1b5b-432b-9ea2-b3ea5f9fc313","Type":"ContainerStarted","Data":"b0ea349a323690fa838427bfebe4cf7c2313bb1f424ad542ae6144514158a227"} Dec 11 10:30:13 crc kubenswrapper[4663]: I1211 10:30:13.704709 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-f8fb84555-jqmgx" podStartSLOduration=1.149748592 podStartE2EDuration="3.704700603s" podCreationTimestamp="2025-12-11 10:30:10 +0000 UTC" firstStartedPulling="2025-12-11 10:30:10.776648301 +0000 UTC m=+741.594578084" lastFinishedPulling="2025-12-11 10:30:13.331600311 +0000 UTC m=+744.149530095" observedRunningTime="2025-12-11 10:30:13.702632032 +0000 UTC m=+744.520561816" watchObservedRunningTime="2025-12-11 10:30:13.704700603 +0000 UTC m=+744.522630388" Dec 11 10:30:13 crc kubenswrapper[4663]: I1211 10:30:13.718015 4663 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-47k77" podStartSLOduration=1.002227163 podStartE2EDuration="3.718001398s" podCreationTimestamp="2025-12-11 10:30:10 +0000 UTC" firstStartedPulling="2025-12-11 10:30:10.611278922 +0000 UTC m=+741.429208705" lastFinishedPulling="2025-12-11 10:30:13.327053156 +0000 UTC m=+744.144982940" observedRunningTime="2025-12-11 10:30:13.714535248 +0000 UTC m=+744.532465031" watchObservedRunningTime="2025-12-11 10:30:13.718001398 +0000 UTC m=+744.535931182" Dec 11 10:30:14 crc kubenswrapper[4663]: I1211 10:30:14.384322 4663 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 11 10:30:15 crc kubenswrapper[4663]: I1211 10:30:15.700121 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f7f7578db-xpdhc" event={"ID":"66a3bc5f-1b5b-432b-9ea2-b3ea5f9fc313","Type":"ContainerStarted","Data":"9b24e21b323213ec828d4dc28f577b59c309b083707ec0acd1efc44add7e7c2b"} Dec 11 10:30:15 crc kubenswrapper[4663]: I1211 10:30:15.714896 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-7f7f7578db-xpdhc" podStartSLOduration=0.95361423 podStartE2EDuration="5.71488356s" podCreationTimestamp="2025-12-11 10:30:10 +0000 UTC" firstStartedPulling="2025-12-11 10:30:10.742135155 +0000 UTC m=+741.560064939" lastFinishedPulling="2025-12-11 10:30:15.503404484 +0000 UTC m=+746.321334269" observedRunningTime="2025-12-11 10:30:15.713214009 +0000 UTC m=+746.531143792" watchObservedRunningTime="2025-12-11 10:30:15.71488356 +0000 UTC m=+746.532813344" Dec 11 10:30:15 crc kubenswrapper[4663]: I1211 10:30:15.715114 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-7hlpp" podStartSLOduration=2.8185065419999997 podStartE2EDuration="5.715108462s" podCreationTimestamp="2025-12-11 10:30:10 +0000 UTC" firstStartedPulling="2025-12-11 10:30:10.440905687 +0000 UTC m=+741.258835470" lastFinishedPulling="2025-12-11 10:30:13.337507606 +0000 UTC m=+744.155437390" observedRunningTime="2025-12-11 10:30:13.72846718 +0000 UTC m=+744.546396963" watchObservedRunningTime="2025-12-11 10:30:15.715108462 +0000 UTC m=+746.533038247" Dec 11 10:30:19 crc kubenswrapper[4663]: I1211 10:30:19.271088 4663 patch_prober.go:28] interesting pod/machine-config-daemon-mq524 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 10:30:19 crc kubenswrapper[4663]: I1211 10:30:19.271318 4663 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 10:30:20 crc kubenswrapper[4663]: I1211 10:30:20.433708 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-7hlpp" Dec 11 10:30:20 crc kubenswrapper[4663]: I1211 10:30:20.663672 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-7cc96b59bf-9bdh8" Dec 11 10:30:20 crc kubenswrapper[4663]: I1211 10:30:20.663712 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-console/console-7cc96b59bf-9bdh8" Dec 11 10:30:20 crc kubenswrapper[4663]: I1211 10:30:20.667459 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-7cc96b59bf-9bdh8" Dec 11 10:30:20 crc kubenswrapper[4663]: I1211 10:30:20.726223 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-7cc96b59bf-9bdh8" Dec 11 10:30:20 crc kubenswrapper[4663]: I1211 10:30:20.757390 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-dc8cp"] Dec 11 10:30:30 crc kubenswrapper[4663]: I1211 10:30:30.373383 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-f8fb84555-jqmgx" Dec 11 10:30:39 crc kubenswrapper[4663]: I1211 10:30:39.907670 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4pq4q8"] Dec 11 10:30:39 crc kubenswrapper[4663]: I1211 10:30:39.910280 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4pq4q8" Dec 11 10:30:39 crc kubenswrapper[4663]: I1211 10:30:39.916058 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 11 10:30:39 crc kubenswrapper[4663]: I1211 10:30:39.936211 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4pq4q8"] Dec 11 10:30:39 crc kubenswrapper[4663]: I1211 10:30:39.945418 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/862b7639-2f10-4cb5-be71-8e57bee9eb6b-util\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4pq4q8\" (UID: \"862b7639-2f10-4cb5-be71-8e57bee9eb6b\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4pq4q8" Dec 11 10:30:39 crc kubenswrapper[4663]: I1211 10:30:39.945533 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/862b7639-2f10-4cb5-be71-8e57bee9eb6b-bundle\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4pq4q8\" (UID: \"862b7639-2f10-4cb5-be71-8e57bee9eb6b\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4pq4q8" Dec 11 10:30:39 crc kubenswrapper[4663]: I1211 10:30:39.945678 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tl67w\" (UniqueName: \"kubernetes.io/projected/862b7639-2f10-4cb5-be71-8e57bee9eb6b-kube-api-access-tl67w\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4pq4q8\" (UID: \"862b7639-2f10-4cb5-be71-8e57bee9eb6b\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4pq4q8" Dec 11 10:30:40 crc kubenswrapper[4663]: I1211 10:30:40.047234 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tl67w\" (UniqueName: \"kubernetes.io/projected/862b7639-2f10-4cb5-be71-8e57bee9eb6b-kube-api-access-tl67w\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4pq4q8\" (UID: \"862b7639-2f10-4cb5-be71-8e57bee9eb6b\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4pq4q8" Dec 11 
Dec 11 10:30:40 crc kubenswrapper[4663]: I1211 10:30:40.047380 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/862b7639-2f10-4cb5-be71-8e57bee9eb6b-bundle\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4pq4q8\" (UID: \"862b7639-2f10-4cb5-be71-8e57bee9eb6b\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4pq4q8"
Dec 11 10:30:40 crc kubenswrapper[4663]: I1211 10:30:40.047754 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/862b7639-2f10-4cb5-be71-8e57bee9eb6b-bundle\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4pq4q8\" (UID: \"862b7639-2f10-4cb5-be71-8e57bee9eb6b\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4pq4q8"
Dec 11 10:30:40 crc kubenswrapper[4663]: I1211 10:30:40.047860 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/862b7639-2f10-4cb5-be71-8e57bee9eb6b-util\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4pq4q8\" (UID: \"862b7639-2f10-4cb5-be71-8e57bee9eb6b\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4pq4q8"
Dec 11 10:30:40 crc kubenswrapper[4663]: I1211 10:30:40.063073 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tl67w\" (UniqueName: \"kubernetes.io/projected/862b7639-2f10-4cb5-be71-8e57bee9eb6b-kube-api-access-tl67w\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4pq4q8\" (UID: \"862b7639-2f10-4cb5-be71-8e57bee9eb6b\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4pq4q8"
Dec 11 10:30:40 crc kubenswrapper[4663]: I1211 10:30:40.246219 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4pq4q8"
Need to start a new one" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4pq4q8" Dec 11 10:30:40 crc kubenswrapper[4663]: I1211 10:30:40.593318 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4pq4q8"] Dec 11 10:30:40 crc kubenswrapper[4663]: I1211 10:30:40.809864 4663 generic.go:334] "Generic (PLEG): container finished" podID="862b7639-2f10-4cb5-be71-8e57bee9eb6b" containerID="212c133d0a4f59131acd6f1147e80425c8140972f0d310abdf017f5f99947dff" exitCode=0 Dec 11 10:30:40 crc kubenswrapper[4663]: I1211 10:30:40.809908 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4pq4q8" event={"ID":"862b7639-2f10-4cb5-be71-8e57bee9eb6b","Type":"ContainerDied","Data":"212c133d0a4f59131acd6f1147e80425c8140972f0d310abdf017f5f99947dff"} Dec 11 10:30:40 crc kubenswrapper[4663]: I1211 10:30:40.810124 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4pq4q8" event={"ID":"862b7639-2f10-4cb5-be71-8e57bee9eb6b","Type":"ContainerStarted","Data":"5ad39c7464f4fa01dd32e34aa7050a69f1170eeaf18dc27df900c975fd1f63ff"} Dec 11 10:30:42 crc kubenswrapper[4663]: I1211 10:30:42.820542 4663 generic.go:334] "Generic (PLEG): container finished" podID="862b7639-2f10-4cb5-be71-8e57bee9eb6b" containerID="8a67be00d3c4b19278693de483530edc8584ae589f2b5f645ac93d78bf0df361" exitCode=0 Dec 11 10:30:42 crc kubenswrapper[4663]: I1211 10:30:42.820647 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4pq4q8" event={"ID":"862b7639-2f10-4cb5-be71-8e57bee9eb6b","Type":"ContainerDied","Data":"8a67be00d3c4b19278693de483530edc8584ae589f2b5f645ac93d78bf0df361"} Dec 11 10:30:43 crc kubenswrapper[4663]: I1211 10:30:43.460855 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-pwwzk"] Dec 11 10:30:43 crc kubenswrapper[4663]: I1211 10:30:43.461785 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-pwwzk" Dec 11 10:30:43 crc kubenswrapper[4663]: I1211 10:30:43.468365 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pwwzk"] Dec 11 10:30:43 crc kubenswrapper[4663]: I1211 10:30:43.488803 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dgz9c\" (UniqueName: \"kubernetes.io/projected/d887a7a6-8f87-4750-bb31-9eb324d69a11-kube-api-access-dgz9c\") pod \"redhat-operators-pwwzk\" (UID: \"d887a7a6-8f87-4750-bb31-9eb324d69a11\") " pod="openshift-marketplace/redhat-operators-pwwzk" Dec 11 10:30:43 crc kubenswrapper[4663]: I1211 10:30:43.488853 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d887a7a6-8f87-4750-bb31-9eb324d69a11-catalog-content\") pod \"redhat-operators-pwwzk\" (UID: \"d887a7a6-8f87-4750-bb31-9eb324d69a11\") " pod="openshift-marketplace/redhat-operators-pwwzk" Dec 11 10:30:43 crc kubenswrapper[4663]: I1211 10:30:43.488915 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d887a7a6-8f87-4750-bb31-9eb324d69a11-utilities\") pod \"redhat-operators-pwwzk\" (UID: \"d887a7a6-8f87-4750-bb31-9eb324d69a11\") " pod="openshift-marketplace/redhat-operators-pwwzk" Dec 11 10:30:43 crc kubenswrapper[4663]: I1211 10:30:43.590704 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dgz9c\" (UniqueName: \"kubernetes.io/projected/d887a7a6-8f87-4750-bb31-9eb324d69a11-kube-api-access-dgz9c\") pod \"redhat-operators-pwwzk\" (UID: \"d887a7a6-8f87-4750-bb31-9eb324d69a11\") " pod="openshift-marketplace/redhat-operators-pwwzk" Dec 11 10:30:43 crc kubenswrapper[4663]: I1211 10:30:43.590746 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d887a7a6-8f87-4750-bb31-9eb324d69a11-catalog-content\") pod \"redhat-operators-pwwzk\" (UID: \"d887a7a6-8f87-4750-bb31-9eb324d69a11\") " pod="openshift-marketplace/redhat-operators-pwwzk" Dec 11 10:30:43 crc kubenswrapper[4663]: I1211 10:30:43.590814 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d887a7a6-8f87-4750-bb31-9eb324d69a11-utilities\") pod \"redhat-operators-pwwzk\" (UID: \"d887a7a6-8f87-4750-bb31-9eb324d69a11\") " pod="openshift-marketplace/redhat-operators-pwwzk" Dec 11 10:30:43 crc kubenswrapper[4663]: I1211 10:30:43.591291 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d887a7a6-8f87-4750-bb31-9eb324d69a11-utilities\") pod \"redhat-operators-pwwzk\" (UID: \"d887a7a6-8f87-4750-bb31-9eb324d69a11\") " pod="openshift-marketplace/redhat-operators-pwwzk" Dec 11 10:30:43 crc kubenswrapper[4663]: I1211 10:30:43.591358 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d887a7a6-8f87-4750-bb31-9eb324d69a11-catalog-content\") pod \"redhat-operators-pwwzk\" (UID: \"d887a7a6-8f87-4750-bb31-9eb324d69a11\") " pod="openshift-marketplace/redhat-operators-pwwzk" Dec 11 10:30:43 crc kubenswrapper[4663]: I1211 10:30:43.607041 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-dgz9c\" (UniqueName: \"kubernetes.io/projected/d887a7a6-8f87-4750-bb31-9eb324d69a11-kube-api-access-dgz9c\") pod \"redhat-operators-pwwzk\" (UID: \"d887a7a6-8f87-4750-bb31-9eb324d69a11\") " pod="openshift-marketplace/redhat-operators-pwwzk" Dec 11 10:30:43 crc kubenswrapper[4663]: I1211 10:30:43.775060 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pwwzk" Dec 11 10:30:43 crc kubenswrapper[4663]: I1211 10:30:43.827025 4663 generic.go:334] "Generic (PLEG): container finished" podID="862b7639-2f10-4cb5-be71-8e57bee9eb6b" containerID="fb23976c223d61ff4c34079748d48f68dd0e63d88b32ca01bcb2d928263fad41" exitCode=0 Dec 11 10:30:43 crc kubenswrapper[4663]: I1211 10:30:43.827084 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4pq4q8" event={"ID":"862b7639-2f10-4cb5-be71-8e57bee9eb6b","Type":"ContainerDied","Data":"fb23976c223d61ff4c34079748d48f68dd0e63d88b32ca01bcb2d928263fad41"} Dec 11 10:30:44 crc kubenswrapper[4663]: I1211 10:30:44.162447 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pwwzk"] Dec 11 10:30:44 crc kubenswrapper[4663]: I1211 10:30:44.833202 4663 generic.go:334] "Generic (PLEG): container finished" podID="d887a7a6-8f87-4750-bb31-9eb324d69a11" containerID="2d3a0f9d10a4a0987dd7e95f5ddffc81925c69d098cad6b90310a294b51044eb" exitCode=0 Dec 11 10:30:44 crc kubenswrapper[4663]: I1211 10:30:44.833283 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pwwzk" event={"ID":"d887a7a6-8f87-4750-bb31-9eb324d69a11","Type":"ContainerDied","Data":"2d3a0f9d10a4a0987dd7e95f5ddffc81925c69d098cad6b90310a294b51044eb"} Dec 11 10:30:44 crc kubenswrapper[4663]: I1211 10:30:44.833437 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pwwzk" event={"ID":"d887a7a6-8f87-4750-bb31-9eb324d69a11","Type":"ContainerStarted","Data":"f2e334206e3d07031d18b5295733079bad139e052bb82dbdd906820c449587aa"} Dec 11 10:30:45 crc kubenswrapper[4663]: I1211 10:30:45.037799 4663 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4pq4q8" Dec 11 10:30:45 crc kubenswrapper[4663]: I1211 10:30:45.109666 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/862b7639-2f10-4cb5-be71-8e57bee9eb6b-util\") pod \"862b7639-2f10-4cb5-be71-8e57bee9eb6b\" (UID: \"862b7639-2f10-4cb5-be71-8e57bee9eb6b\") " Dec 11 10:30:45 crc kubenswrapper[4663]: I1211 10:30:45.109793 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tl67w\" (UniqueName: \"kubernetes.io/projected/862b7639-2f10-4cb5-be71-8e57bee9eb6b-kube-api-access-tl67w\") pod \"862b7639-2f10-4cb5-be71-8e57bee9eb6b\" (UID: \"862b7639-2f10-4cb5-be71-8e57bee9eb6b\") " Dec 11 10:30:45 crc kubenswrapper[4663]: I1211 10:30:45.109828 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/862b7639-2f10-4cb5-be71-8e57bee9eb6b-bundle\") pod \"862b7639-2f10-4cb5-be71-8e57bee9eb6b\" (UID: \"862b7639-2f10-4cb5-be71-8e57bee9eb6b\") " Dec 11 10:30:45 crc kubenswrapper[4663]: I1211 10:30:45.110690 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/862b7639-2f10-4cb5-be71-8e57bee9eb6b-bundle" (OuterVolumeSpecName: "bundle") pod "862b7639-2f10-4cb5-be71-8e57bee9eb6b" (UID: "862b7639-2f10-4cb5-be71-8e57bee9eb6b"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:30:45 crc kubenswrapper[4663]: I1211 10:30:45.114387 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/862b7639-2f10-4cb5-be71-8e57bee9eb6b-kube-api-access-tl67w" (OuterVolumeSpecName: "kube-api-access-tl67w") pod "862b7639-2f10-4cb5-be71-8e57bee9eb6b" (UID: "862b7639-2f10-4cb5-be71-8e57bee9eb6b"). InnerVolumeSpecName "kube-api-access-tl67w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:30:45 crc kubenswrapper[4663]: I1211 10:30:45.119552 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/862b7639-2f10-4cb5-be71-8e57bee9eb6b-util" (OuterVolumeSpecName: "util") pod "862b7639-2f10-4cb5-be71-8e57bee9eb6b" (UID: "862b7639-2f10-4cb5-be71-8e57bee9eb6b"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:30:45 crc kubenswrapper[4663]: I1211 10:30:45.211080 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tl67w\" (UniqueName: \"kubernetes.io/projected/862b7639-2f10-4cb5-be71-8e57bee9eb6b-kube-api-access-tl67w\") on node \"crc\" DevicePath \"\"" Dec 11 10:30:45 crc kubenswrapper[4663]: I1211 10:30:45.211117 4663 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/862b7639-2f10-4cb5-be71-8e57bee9eb6b-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 10:30:45 crc kubenswrapper[4663]: I1211 10:30:45.211126 4663 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/862b7639-2f10-4cb5-be71-8e57bee9eb6b-util\") on node \"crc\" DevicePath \"\"" Dec 11 10:30:45 crc kubenswrapper[4663]: I1211 10:30:45.781253 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-dc8cp" podUID="d18be0a7-2aaa-4aaa-9d37-f44453e994d8" containerName="console" containerID="cri-o://4a878ed1dc6b6568d1664f7125ba68ca66c30d623daf66591d8b88f7155fa2e1" gracePeriod=15 Dec 11 10:30:45 crc kubenswrapper[4663]: I1211 10:30:45.839037 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4pq4q8" event={"ID":"862b7639-2f10-4cb5-be71-8e57bee9eb6b","Type":"ContainerDied","Data":"5ad39c7464f4fa01dd32e34aa7050a69f1170eeaf18dc27df900c975fd1f63ff"} Dec 11 10:30:45 crc kubenswrapper[4663]: I1211 10:30:45.839097 4663 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5ad39c7464f4fa01dd32e34aa7050a69f1170eeaf18dc27df900c975fd1f63ff" Dec 11 10:30:45 crc kubenswrapper[4663]: I1211 10:30:45.839074 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4pq4q8" Dec 11 10:30:46 crc kubenswrapper[4663]: I1211 10:30:46.071791 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-dc8cp_d18be0a7-2aaa-4aaa-9d37-f44453e994d8/console/0.log" Dec 11 10:30:46 crc kubenswrapper[4663]: I1211 10:30:46.072054 4663 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-dc8cp" Dec 11 10:30:46 crc kubenswrapper[4663]: I1211 10:30:46.120381 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/d18be0a7-2aaa-4aaa-9d37-f44453e994d8-console-oauth-config\") pod \"d18be0a7-2aaa-4aaa-9d37-f44453e994d8\" (UID: \"d18be0a7-2aaa-4aaa-9d37-f44453e994d8\") " Dec 11 10:30:46 crc kubenswrapper[4663]: I1211 10:30:46.120424 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/d18be0a7-2aaa-4aaa-9d37-f44453e994d8-console-config\") pod \"d18be0a7-2aaa-4aaa-9d37-f44453e994d8\" (UID: \"d18be0a7-2aaa-4aaa-9d37-f44453e994d8\") " Dec 11 10:30:46 crc kubenswrapper[4663]: I1211 10:30:46.120455 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/d18be0a7-2aaa-4aaa-9d37-f44453e994d8-service-ca\") pod \"d18be0a7-2aaa-4aaa-9d37-f44453e994d8\" (UID: \"d18be0a7-2aaa-4aaa-9d37-f44453e994d8\") " Dec 11 10:30:46 crc kubenswrapper[4663]: I1211 10:30:46.120512 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/d18be0a7-2aaa-4aaa-9d37-f44453e994d8-oauth-serving-cert\") pod \"d18be0a7-2aaa-4aaa-9d37-f44453e994d8\" (UID: \"d18be0a7-2aaa-4aaa-9d37-f44453e994d8\") " Dec 11 10:30:46 crc kubenswrapper[4663]: I1211 10:30:46.120555 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/d18be0a7-2aaa-4aaa-9d37-f44453e994d8-console-serving-cert\") pod \"d18be0a7-2aaa-4aaa-9d37-f44453e994d8\" (UID: \"d18be0a7-2aaa-4aaa-9d37-f44453e994d8\") " Dec 11 10:30:46 crc kubenswrapper[4663]: I1211 10:30:46.120572 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-skwwp\" (UniqueName: \"kubernetes.io/projected/d18be0a7-2aaa-4aaa-9d37-f44453e994d8-kube-api-access-skwwp\") pod \"d18be0a7-2aaa-4aaa-9d37-f44453e994d8\" (UID: \"d18be0a7-2aaa-4aaa-9d37-f44453e994d8\") " Dec 11 10:30:46 crc kubenswrapper[4663]: I1211 10:30:46.120589 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d18be0a7-2aaa-4aaa-9d37-f44453e994d8-trusted-ca-bundle\") pod \"d18be0a7-2aaa-4aaa-9d37-f44453e994d8\" (UID: \"d18be0a7-2aaa-4aaa-9d37-f44453e994d8\") " Dec 11 10:30:46 crc kubenswrapper[4663]: I1211 10:30:46.121307 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d18be0a7-2aaa-4aaa-9d37-f44453e994d8-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "d18be0a7-2aaa-4aaa-9d37-f44453e994d8" (UID: "d18be0a7-2aaa-4aaa-9d37-f44453e994d8"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:30:46 crc kubenswrapper[4663]: I1211 10:30:46.121332 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d18be0a7-2aaa-4aaa-9d37-f44453e994d8-console-config" (OuterVolumeSpecName: "console-config") pod "d18be0a7-2aaa-4aaa-9d37-f44453e994d8" (UID: "d18be0a7-2aaa-4aaa-9d37-f44453e994d8"). InnerVolumeSpecName "console-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:30:46 crc kubenswrapper[4663]: I1211 10:30:46.121312 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d18be0a7-2aaa-4aaa-9d37-f44453e994d8-service-ca" (OuterVolumeSpecName: "service-ca") pod "d18be0a7-2aaa-4aaa-9d37-f44453e994d8" (UID: "d18be0a7-2aaa-4aaa-9d37-f44453e994d8"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:30:46 crc kubenswrapper[4663]: I1211 10:30:46.121327 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d18be0a7-2aaa-4aaa-9d37-f44453e994d8-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "d18be0a7-2aaa-4aaa-9d37-f44453e994d8" (UID: "d18be0a7-2aaa-4aaa-9d37-f44453e994d8"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:30:46 crc kubenswrapper[4663]: I1211 10:30:46.124554 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d18be0a7-2aaa-4aaa-9d37-f44453e994d8-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "d18be0a7-2aaa-4aaa-9d37-f44453e994d8" (UID: "d18be0a7-2aaa-4aaa-9d37-f44453e994d8"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:30:46 crc kubenswrapper[4663]: I1211 10:30:46.124859 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d18be0a7-2aaa-4aaa-9d37-f44453e994d8-kube-api-access-skwwp" (OuterVolumeSpecName: "kube-api-access-skwwp") pod "d18be0a7-2aaa-4aaa-9d37-f44453e994d8" (UID: "d18be0a7-2aaa-4aaa-9d37-f44453e994d8"). InnerVolumeSpecName "kube-api-access-skwwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:30:46 crc kubenswrapper[4663]: I1211 10:30:46.125119 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d18be0a7-2aaa-4aaa-9d37-f44453e994d8-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "d18be0a7-2aaa-4aaa-9d37-f44453e994d8" (UID: "d18be0a7-2aaa-4aaa-9d37-f44453e994d8"). InnerVolumeSpecName "console-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:30:46 crc kubenswrapper[4663]: I1211 10:30:46.222640 4663 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/d18be0a7-2aaa-4aaa-9d37-f44453e994d8-service-ca\") on node \"crc\" DevicePath \"\"" Dec 11 10:30:46 crc kubenswrapper[4663]: I1211 10:30:46.222682 4663 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/d18be0a7-2aaa-4aaa-9d37-f44453e994d8-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 10:30:46 crc kubenswrapper[4663]: I1211 10:30:46.222694 4663 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/d18be0a7-2aaa-4aaa-9d37-f44453e994d8-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 10:30:46 crc kubenswrapper[4663]: I1211 10:30:46.222702 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-skwwp\" (UniqueName: \"kubernetes.io/projected/d18be0a7-2aaa-4aaa-9d37-f44453e994d8-kube-api-access-skwwp\") on node \"crc\" DevicePath \"\"" Dec 11 10:30:46 crc kubenswrapper[4663]: I1211 10:30:46.222711 4663 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d18be0a7-2aaa-4aaa-9d37-f44453e994d8-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 10:30:46 crc kubenswrapper[4663]: I1211 10:30:46.222719 4663 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/d18be0a7-2aaa-4aaa-9d37-f44453e994d8-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 11 10:30:46 crc kubenswrapper[4663]: I1211 10:30:46.222727 4663 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/d18be0a7-2aaa-4aaa-9d37-f44453e994d8-console-config\") on node \"crc\" DevicePath \"\"" Dec 11 10:30:46 crc kubenswrapper[4663]: I1211 10:30:46.844978 4663 generic.go:334] "Generic (PLEG): container finished" podID="d887a7a6-8f87-4750-bb31-9eb324d69a11" containerID="382d392f257dfe3beb4d074eda1a9e7abde194b05242b5cd34cc6d03e9891229" exitCode=0 Dec 11 10:30:46 crc kubenswrapper[4663]: I1211 10:30:46.845061 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pwwzk" event={"ID":"d887a7a6-8f87-4750-bb31-9eb324d69a11","Type":"ContainerDied","Data":"382d392f257dfe3beb4d074eda1a9e7abde194b05242b5cd34cc6d03e9891229"} Dec 11 10:30:46 crc kubenswrapper[4663]: I1211 10:30:46.846018 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-dc8cp_d18be0a7-2aaa-4aaa-9d37-f44453e994d8/console/0.log" Dec 11 10:30:46 crc kubenswrapper[4663]: I1211 10:30:46.846054 4663 generic.go:334] "Generic (PLEG): container finished" podID="d18be0a7-2aaa-4aaa-9d37-f44453e994d8" containerID="4a878ed1dc6b6568d1664f7125ba68ca66c30d623daf66591d8b88f7155fa2e1" exitCode=2 Dec 11 10:30:46 crc kubenswrapper[4663]: I1211 10:30:46.846082 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-dc8cp" event={"ID":"d18be0a7-2aaa-4aaa-9d37-f44453e994d8","Type":"ContainerDied","Data":"4a878ed1dc6b6568d1664f7125ba68ca66c30d623daf66591d8b88f7155fa2e1"} Dec 11 10:30:46 crc kubenswrapper[4663]: I1211 10:30:46.846132 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-dc8cp" 
event={"ID":"d18be0a7-2aaa-4aaa-9d37-f44453e994d8","Type":"ContainerDied","Data":"ad0b211d0cb87c19996d48bd7b621ef4d708b5586d535220e706e4c893409a10"} Dec 11 10:30:46 crc kubenswrapper[4663]: I1211 10:30:46.846151 4663 scope.go:117] "RemoveContainer" containerID="4a878ed1dc6b6568d1664f7125ba68ca66c30d623daf66591d8b88f7155fa2e1" Dec 11 10:30:46 crc kubenswrapper[4663]: I1211 10:30:46.846092 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-dc8cp" Dec 11 10:30:46 crc kubenswrapper[4663]: I1211 10:30:46.859153 4663 scope.go:117] "RemoveContainer" containerID="4a878ed1dc6b6568d1664f7125ba68ca66c30d623daf66591d8b88f7155fa2e1" Dec 11 10:30:46 crc kubenswrapper[4663]: E1211 10:30:46.859485 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4a878ed1dc6b6568d1664f7125ba68ca66c30d623daf66591d8b88f7155fa2e1\": container with ID starting with 4a878ed1dc6b6568d1664f7125ba68ca66c30d623daf66591d8b88f7155fa2e1 not found: ID does not exist" containerID="4a878ed1dc6b6568d1664f7125ba68ca66c30d623daf66591d8b88f7155fa2e1" Dec 11 10:30:46 crc kubenswrapper[4663]: I1211 10:30:46.859516 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a878ed1dc6b6568d1664f7125ba68ca66c30d623daf66591d8b88f7155fa2e1"} err="failed to get container status \"4a878ed1dc6b6568d1664f7125ba68ca66c30d623daf66591d8b88f7155fa2e1\": rpc error: code = NotFound desc = could not find container \"4a878ed1dc6b6568d1664f7125ba68ca66c30d623daf66591d8b88f7155fa2e1\": container with ID starting with 4a878ed1dc6b6568d1664f7125ba68ca66c30d623daf66591d8b88f7155fa2e1 not found: ID does not exist" Dec 11 10:30:46 crc kubenswrapper[4663]: I1211 10:30:46.871506 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-dc8cp"] Dec 11 10:30:46 crc kubenswrapper[4663]: I1211 10:30:46.874761 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-dc8cp"] Dec 11 10:30:47 crc kubenswrapper[4663]: I1211 10:30:47.510204 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d18be0a7-2aaa-4aaa-9d37-f44453e994d8" path="/var/lib/kubelet/pods/d18be0a7-2aaa-4aaa-9d37-f44453e994d8/volumes" Dec 11 10:30:47 crc kubenswrapper[4663]: I1211 10:30:47.852895 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pwwzk" event={"ID":"d887a7a6-8f87-4750-bb31-9eb324d69a11","Type":"ContainerStarted","Data":"b314a24ed8a0016a4cda6338945c97c8b1a6d165475e69f8a51513c9baf55739"} Dec 11 10:30:47 crc kubenswrapper[4663]: I1211 10:30:47.866733 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-pwwzk" podStartSLOduration=2.139336088 podStartE2EDuration="4.866720437s" podCreationTimestamp="2025-12-11 10:30:43 +0000 UTC" firstStartedPulling="2025-12-11 10:30:44.834222994 +0000 UTC m=+775.652152777" lastFinishedPulling="2025-12-11 10:30:47.561607342 +0000 UTC m=+778.379537126" observedRunningTime="2025-12-11 10:30:47.86336818 +0000 UTC m=+778.681297954" watchObservedRunningTime="2025-12-11 10:30:47.866720437 +0000 UTC m=+778.684650221" Dec 11 10:30:49 crc kubenswrapper[4663]: I1211 10:30:49.271216 4663 patch_prober.go:28] interesting pod/machine-config-daemon-mq524 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 10:30:49 crc kubenswrapper[4663]: I1211 10:30:49.271841 4663 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 10:30:49 crc kubenswrapper[4663]: I1211 10:30:49.271942 4663 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mq524" Dec 11 10:30:49 crc kubenswrapper[4663]: I1211 10:30:49.272466 4663 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d6a383f638ba877808ee9ae4a0f108f5e0f431193b4b9388090fdfc37a56b27f"} pod="openshift-machine-config-operator/machine-config-daemon-mq524" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 11 10:30:49 crc kubenswrapper[4663]: I1211 10:30:49.272584 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13" containerName="machine-config-daemon" containerID="cri-o://d6a383f638ba877808ee9ae4a0f108f5e0f431193b4b9388090fdfc37a56b27f" gracePeriod=600 Dec 11 10:30:49 crc kubenswrapper[4663]: I1211 10:30:49.864722 4663 generic.go:334] "Generic (PLEG): container finished" podID="98941020-2764-490d-81db-9decd21aaa13" containerID="d6a383f638ba877808ee9ae4a0f108f5e0f431193b4b9388090fdfc37a56b27f" exitCode=0 Dec 11 10:30:49 crc kubenswrapper[4663]: I1211 10:30:49.864752 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" event={"ID":"98941020-2764-490d-81db-9decd21aaa13","Type":"ContainerDied","Data":"d6a383f638ba877808ee9ae4a0f108f5e0f431193b4b9388090fdfc37a56b27f"} Dec 11 10:30:49 crc kubenswrapper[4663]: I1211 10:30:49.864969 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" event={"ID":"98941020-2764-490d-81db-9decd21aaa13","Type":"ContainerStarted","Data":"8c8254fc38982caf8a920e34b1f8132d5bd94723c9a1ecbeeb5eec5817ed54be"} Dec 11 10:30:49 crc kubenswrapper[4663]: I1211 10:30:49.864992 4663 scope.go:117] "RemoveContainer" containerID="71596b251b77ae3f29e9862f15f8d7073ec4222881d08d3a5d12d46cc1d6d5ef" Dec 11 10:30:53 crc kubenswrapper[4663]: I1211 10:30:53.775805 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-pwwzk" Dec 11 10:30:53 crc kubenswrapper[4663]: I1211 10:30:53.776154 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-pwwzk" Dec 11 10:30:53 crc kubenswrapper[4663]: I1211 10:30:53.803923 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-pwwzk" Dec 11 10:30:53 crc kubenswrapper[4663]: I1211 10:30:53.909652 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-pwwzk" Dec 11 10:30:56 crc kubenswrapper[4663]: I1211 10:30:56.435255 4663 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["metallb-system/metallb-operator-controller-manager-f8979885c-gpvr8"] Dec 11 10:30:56 crc kubenswrapper[4663]: E1211 10:30:56.435633 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d18be0a7-2aaa-4aaa-9d37-f44453e994d8" containerName="console" Dec 11 10:30:56 crc kubenswrapper[4663]: I1211 10:30:56.435644 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="d18be0a7-2aaa-4aaa-9d37-f44453e994d8" containerName="console" Dec 11 10:30:56 crc kubenswrapper[4663]: E1211 10:30:56.435653 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="862b7639-2f10-4cb5-be71-8e57bee9eb6b" containerName="extract" Dec 11 10:30:56 crc kubenswrapper[4663]: I1211 10:30:56.435660 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="862b7639-2f10-4cb5-be71-8e57bee9eb6b" containerName="extract" Dec 11 10:30:56 crc kubenswrapper[4663]: E1211 10:30:56.435672 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="862b7639-2f10-4cb5-be71-8e57bee9eb6b" containerName="util" Dec 11 10:30:56 crc kubenswrapper[4663]: I1211 10:30:56.435677 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="862b7639-2f10-4cb5-be71-8e57bee9eb6b" containerName="util" Dec 11 10:30:56 crc kubenswrapper[4663]: E1211 10:30:56.435688 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="862b7639-2f10-4cb5-be71-8e57bee9eb6b" containerName="pull" Dec 11 10:30:56 crc kubenswrapper[4663]: I1211 10:30:56.435693 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="862b7639-2f10-4cb5-be71-8e57bee9eb6b" containerName="pull" Dec 11 10:30:56 crc kubenswrapper[4663]: I1211 10:30:56.435784 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="d18be0a7-2aaa-4aaa-9d37-f44453e994d8" containerName="console" Dec 11 10:30:56 crc kubenswrapper[4663]: I1211 10:30:56.435796 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="862b7639-2f10-4cb5-be71-8e57bee9eb6b" containerName="extract" Dec 11 10:30:56 crc kubenswrapper[4663]: I1211 10:30:56.436233 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-f8979885c-gpvr8" Dec 11 10:30:56 crc kubenswrapper[4663]: I1211 10:30:56.438168 4663 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-lmf4c" Dec 11 10:30:56 crc kubenswrapper[4663]: I1211 10:30:56.439045 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Dec 11 10:30:56 crc kubenswrapper[4663]: I1211 10:30:56.439413 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Dec 11 10:30:56 crc kubenswrapper[4663]: I1211 10:30:56.439625 4663 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Dec 11 10:30:56 crc kubenswrapper[4663]: I1211 10:30:56.440250 4663 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Dec 11 10:30:56 crc kubenswrapper[4663]: I1211 10:30:56.462191 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-f8979885c-gpvr8"] Dec 11 10:30:56 crc kubenswrapper[4663]: I1211 10:30:56.543363 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/46b9f037-3a95-414e-9089-95dd401c7b2d-apiservice-cert\") pod \"metallb-operator-controller-manager-f8979885c-gpvr8\" (UID: \"46b9f037-3a95-414e-9089-95dd401c7b2d\") " pod="metallb-system/metallb-operator-controller-manager-f8979885c-gpvr8" Dec 11 10:30:56 crc kubenswrapper[4663]: I1211 10:30:56.543438 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pt2ln\" (UniqueName: \"kubernetes.io/projected/46b9f037-3a95-414e-9089-95dd401c7b2d-kube-api-access-pt2ln\") pod \"metallb-operator-controller-manager-f8979885c-gpvr8\" (UID: \"46b9f037-3a95-414e-9089-95dd401c7b2d\") " pod="metallb-system/metallb-operator-controller-manager-f8979885c-gpvr8" Dec 11 10:30:56 crc kubenswrapper[4663]: I1211 10:30:56.543610 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/46b9f037-3a95-414e-9089-95dd401c7b2d-webhook-cert\") pod \"metallb-operator-controller-manager-f8979885c-gpvr8\" (UID: \"46b9f037-3a95-414e-9089-95dd401c7b2d\") " pod="metallb-system/metallb-operator-controller-manager-f8979885c-gpvr8" Dec 11 10:30:56 crc kubenswrapper[4663]: I1211 10:30:56.644617 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/46b9f037-3a95-414e-9089-95dd401c7b2d-webhook-cert\") pod \"metallb-operator-controller-manager-f8979885c-gpvr8\" (UID: \"46b9f037-3a95-414e-9089-95dd401c7b2d\") " pod="metallb-system/metallb-operator-controller-manager-f8979885c-gpvr8" Dec 11 10:30:56 crc kubenswrapper[4663]: I1211 10:30:56.644683 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/46b9f037-3a95-414e-9089-95dd401c7b2d-apiservice-cert\") pod \"metallb-operator-controller-manager-f8979885c-gpvr8\" (UID: \"46b9f037-3a95-414e-9089-95dd401c7b2d\") " pod="metallb-system/metallb-operator-controller-manager-f8979885c-gpvr8" Dec 11 10:30:56 crc kubenswrapper[4663]: I1211 10:30:56.644724 4663 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-pt2ln\" (UniqueName: \"kubernetes.io/projected/46b9f037-3a95-414e-9089-95dd401c7b2d-kube-api-access-pt2ln\") pod \"metallb-operator-controller-manager-f8979885c-gpvr8\" (UID: \"46b9f037-3a95-414e-9089-95dd401c7b2d\") " pod="metallb-system/metallb-operator-controller-manager-f8979885c-gpvr8" Dec 11 10:30:56 crc kubenswrapper[4663]: I1211 10:30:56.649485 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/46b9f037-3a95-414e-9089-95dd401c7b2d-webhook-cert\") pod \"metallb-operator-controller-manager-f8979885c-gpvr8\" (UID: \"46b9f037-3a95-414e-9089-95dd401c7b2d\") " pod="metallb-system/metallb-operator-controller-manager-f8979885c-gpvr8" Dec 11 10:30:56 crc kubenswrapper[4663]: I1211 10:30:56.650735 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/46b9f037-3a95-414e-9089-95dd401c7b2d-apiservice-cert\") pod \"metallb-operator-controller-manager-f8979885c-gpvr8\" (UID: \"46b9f037-3a95-414e-9089-95dd401c7b2d\") " pod="metallb-system/metallb-operator-controller-manager-f8979885c-gpvr8" Dec 11 10:30:56 crc kubenswrapper[4663]: I1211 10:30:56.658642 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pt2ln\" (UniqueName: \"kubernetes.io/projected/46b9f037-3a95-414e-9089-95dd401c7b2d-kube-api-access-pt2ln\") pod \"metallb-operator-controller-manager-f8979885c-gpvr8\" (UID: \"46b9f037-3a95-414e-9089-95dd401c7b2d\") " pod="metallb-system/metallb-operator-controller-manager-f8979885c-gpvr8" Dec 11 10:30:56 crc kubenswrapper[4663]: I1211 10:30:56.736261 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-6c9b679f4-7xwnn"] Dec 11 10:30:56 crc kubenswrapper[4663]: I1211 10:30:56.737066 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-6c9b679f4-7xwnn" Dec 11 10:30:56 crc kubenswrapper[4663]: I1211 10:30:56.739490 4663 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-qqvk2" Dec 11 10:30:56 crc kubenswrapper[4663]: I1211 10:30:56.739546 4663 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Dec 11 10:30:56 crc kubenswrapper[4663]: I1211 10:30:56.739491 4663 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 11 10:30:56 crc kubenswrapper[4663]: I1211 10:30:56.749153 4663 util.go:30] "No sandbox for pod can be found. 
Dec 11 10:30:56 crc kubenswrapper[4663]: I1211 10:30:56.753998 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-6c9b679f4-7xwnn"]
Dec 11 10:30:56 crc kubenswrapper[4663]: I1211 10:30:56.848344 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/322fecc6-f5c2-48c6-a014-91ca3c99b00f-apiservice-cert\") pod \"metallb-operator-webhook-server-6c9b679f4-7xwnn\" (UID: \"322fecc6-f5c2-48c6-a014-91ca3c99b00f\") " pod="metallb-system/metallb-operator-webhook-server-6c9b679f4-7xwnn"
Dec 11 10:30:56 crc kubenswrapper[4663]: I1211 10:30:56.848412 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dlrp8\" (UniqueName: \"kubernetes.io/projected/322fecc6-f5c2-48c6-a014-91ca3c99b00f-kube-api-access-dlrp8\") pod \"metallb-operator-webhook-server-6c9b679f4-7xwnn\" (UID: \"322fecc6-f5c2-48c6-a014-91ca3c99b00f\") " pod="metallb-system/metallb-operator-webhook-server-6c9b679f4-7xwnn"
Dec 11 10:30:56 crc kubenswrapper[4663]: I1211 10:30:56.848433 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/322fecc6-f5c2-48c6-a014-91ca3c99b00f-webhook-cert\") pod \"metallb-operator-webhook-server-6c9b679f4-7xwnn\" (UID: \"322fecc6-f5c2-48c6-a014-91ca3c99b00f\") " pod="metallb-system/metallb-operator-webhook-server-6c9b679f4-7xwnn"
Dec 11 10:30:56 crc kubenswrapper[4663]: I1211 10:30:56.860204 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-pwwzk"]
Dec 11 10:30:56 crc kubenswrapper[4663]: I1211 10:30:56.860401 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-pwwzk" podUID="d887a7a6-8f87-4750-bb31-9eb324d69a11" containerName="registry-server" containerID="cri-o://b314a24ed8a0016a4cda6338945c97c8b1a6d165475e69f8a51513c9baf55739" gracePeriod=2
Dec 11 10:30:56 crc kubenswrapper[4663]: I1211 10:30:56.949299 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/322fecc6-f5c2-48c6-a014-91ca3c99b00f-apiservice-cert\") pod \"metallb-operator-webhook-server-6c9b679f4-7xwnn\" (UID: \"322fecc6-f5c2-48c6-a014-91ca3c99b00f\") " pod="metallb-system/metallb-operator-webhook-server-6c9b679f4-7xwnn"
Dec 11 10:30:56 crc kubenswrapper[4663]: I1211 10:30:56.949375 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dlrp8\" (UniqueName: \"kubernetes.io/projected/322fecc6-f5c2-48c6-a014-91ca3c99b00f-kube-api-access-dlrp8\") pod \"metallb-operator-webhook-server-6c9b679f4-7xwnn\" (UID: \"322fecc6-f5c2-48c6-a014-91ca3c99b00f\") " pod="metallb-system/metallb-operator-webhook-server-6c9b679f4-7xwnn"
Dec 11 10:30:56 crc kubenswrapper[4663]: I1211 10:30:56.949395 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/322fecc6-f5c2-48c6-a014-91ca3c99b00f-webhook-cert\") pod \"metallb-operator-webhook-server-6c9b679f4-7xwnn\" (UID: \"322fecc6-f5c2-48c6-a014-91ca3c99b00f\") " pod="metallb-system/metallb-operator-webhook-server-6c9b679f4-7xwnn"
Dec 11 10:30:56 crc kubenswrapper[4663]: I1211 10:30:56.953661 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/322fecc6-f5c2-48c6-a014-91ca3c99b00f-apiservice-cert\") pod \"metallb-operator-webhook-server-6c9b679f4-7xwnn\" (UID: \"322fecc6-f5c2-48c6-a014-91ca3c99b00f\") " pod="metallb-system/metallb-operator-webhook-server-6c9b679f4-7xwnn"
Dec 11 10:30:56 crc kubenswrapper[4663]: I1211 10:30:56.953726 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/322fecc6-f5c2-48c6-a014-91ca3c99b00f-webhook-cert\") pod \"metallb-operator-webhook-server-6c9b679f4-7xwnn\" (UID: \"322fecc6-f5c2-48c6-a014-91ca3c99b00f\") " pod="metallb-system/metallb-operator-webhook-server-6c9b679f4-7xwnn"
Dec 11 10:30:56 crc kubenswrapper[4663]: I1211 10:30:56.965734 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dlrp8\" (UniqueName: \"kubernetes.io/projected/322fecc6-f5c2-48c6-a014-91ca3c99b00f-kube-api-access-dlrp8\") pod \"metallb-operator-webhook-server-6c9b679f4-7xwnn\" (UID: \"322fecc6-f5c2-48c6-a014-91ca3c99b00f\") " pod="metallb-system/metallb-operator-webhook-server-6c9b679f4-7xwnn"
Dec 11 10:30:57 crc kubenswrapper[4663]: I1211 10:30:57.052258 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-6c9b679f4-7xwnn"
Dec 11 10:30:57 crc kubenswrapper[4663]: I1211 10:30:57.130338 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-f8979885c-gpvr8"]
Dec 11 10:30:57 crc kubenswrapper[4663]: I1211 10:30:57.416407 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-6c9b679f4-7xwnn"]
Dec 11 10:30:57 crc kubenswrapper[4663]: W1211 10:30:57.419944 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod322fecc6_f5c2_48c6_a014_91ca3c99b00f.slice/crio-7f1500c7a0f4696417fc6265132df3628e16b0d65f4b53c053577cdcf5a1f7ab WatchSource:0}: Error finding container 7f1500c7a0f4696417fc6265132df3628e16b0d65f4b53c053577cdcf5a1f7ab: Status 404 returned error can't find the container with id 7f1500c7a0f4696417fc6265132df3628e16b0d65f4b53c053577cdcf5a1f7ab
Dec 11 10:30:57 crc kubenswrapper[4663]: I1211 10:30:57.901803 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-6c9b679f4-7xwnn" event={"ID":"322fecc6-f5c2-48c6-a014-91ca3c99b00f","Type":"ContainerStarted","Data":"7f1500c7a0f4696417fc6265132df3628e16b0d65f4b53c053577cdcf5a1f7ab"}
Dec 11 10:30:57 crc kubenswrapper[4663]: I1211 10:30:57.902693 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-f8979885c-gpvr8" event={"ID":"46b9f037-3a95-414e-9089-95dd401c7b2d","Type":"ContainerStarted","Data":"23ffeec95b95bab9a1e8b497aa4969f692b4a219c3b117f1f98b73f3d56b3dc3"}
Dec 11 10:30:57 crc kubenswrapper[4663]: I1211 10:30:57.904668 4663 generic.go:334] "Generic (PLEG): container finished" podID="d887a7a6-8f87-4750-bb31-9eb324d69a11" containerID="b314a24ed8a0016a4cda6338945c97c8b1a6d165475e69f8a51513c9baf55739" exitCode=0
Dec 11 10:30:57 crc kubenswrapper[4663]: I1211 10:30:57.904702 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pwwzk" event={"ID":"d887a7a6-8f87-4750-bb31-9eb324d69a11","Type":"ContainerDied","Data":"b314a24ed8a0016a4cda6338945c97c8b1a6d165475e69f8a51513c9baf55739"}
Dec 11 10:30:58 crc kubenswrapper[4663]: I1211 10:30:58.255004 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pwwzk"
Dec 11 10:30:58 crc kubenswrapper[4663]: I1211 10:30:58.371237 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d887a7a6-8f87-4750-bb31-9eb324d69a11-catalog-content\") pod \"d887a7a6-8f87-4750-bb31-9eb324d69a11\" (UID: \"d887a7a6-8f87-4750-bb31-9eb324d69a11\") "
Dec 11 10:30:58 crc kubenswrapper[4663]: I1211 10:30:58.371294 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d887a7a6-8f87-4750-bb31-9eb324d69a11-utilities\") pod \"d887a7a6-8f87-4750-bb31-9eb324d69a11\" (UID: \"d887a7a6-8f87-4750-bb31-9eb324d69a11\") "
Dec 11 10:30:58 crc kubenswrapper[4663]: I1211 10:30:58.371368 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dgz9c\" (UniqueName: \"kubernetes.io/projected/d887a7a6-8f87-4750-bb31-9eb324d69a11-kube-api-access-dgz9c\") pod \"d887a7a6-8f87-4750-bb31-9eb324d69a11\" (UID: \"d887a7a6-8f87-4750-bb31-9eb324d69a11\") "
Dec 11 10:30:58 crc kubenswrapper[4663]: I1211 10:30:58.372225 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d887a7a6-8f87-4750-bb31-9eb324d69a11-utilities" (OuterVolumeSpecName: "utilities") pod "d887a7a6-8f87-4750-bb31-9eb324d69a11" (UID: "d887a7a6-8f87-4750-bb31-9eb324d69a11"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 11 10:30:58 crc kubenswrapper[4663]: I1211 10:30:58.388494 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d887a7a6-8f87-4750-bb31-9eb324d69a11-kube-api-access-dgz9c" (OuterVolumeSpecName: "kube-api-access-dgz9c") pod "d887a7a6-8f87-4750-bb31-9eb324d69a11" (UID: "d887a7a6-8f87-4750-bb31-9eb324d69a11"). InnerVolumeSpecName "kube-api-access-dgz9c". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 11 10:30:58 crc kubenswrapper[4663]: I1211 10:30:58.462210 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d887a7a6-8f87-4750-bb31-9eb324d69a11-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d887a7a6-8f87-4750-bb31-9eb324d69a11" (UID: "d887a7a6-8f87-4750-bb31-9eb324d69a11"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 11 10:30:58 crc kubenswrapper[4663]: I1211 10:30:58.473077 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dgz9c\" (UniqueName: \"kubernetes.io/projected/d887a7a6-8f87-4750-bb31-9eb324d69a11-kube-api-access-dgz9c\") on node \"crc\" DevicePath \"\""
Dec 11 10:30:58 crc kubenswrapper[4663]: I1211 10:30:58.473111 4663 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d887a7a6-8f87-4750-bb31-9eb324d69a11-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 11 10:30:58 crc kubenswrapper[4663]: I1211 10:30:58.473121 4663 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d887a7a6-8f87-4750-bb31-9eb324d69a11-utilities\") on node \"crc\" DevicePath \"\""
Dec 11 10:30:58 crc kubenswrapper[4663]: I1211 10:30:58.937193 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pwwzk" event={"ID":"d887a7a6-8f87-4750-bb31-9eb324d69a11","Type":"ContainerDied","Data":"f2e334206e3d07031d18b5295733079bad139e052bb82dbdd906820c449587aa"}
Dec 11 10:30:58 crc kubenswrapper[4663]: I1211 10:30:58.937244 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pwwzk"
Dec 11 10:30:58 crc kubenswrapper[4663]: I1211 10:30:58.937251 4663 scope.go:117] "RemoveContainer" containerID="b314a24ed8a0016a4cda6338945c97c8b1a6d165475e69f8a51513c9baf55739"
Dec 11 10:30:58 crc kubenswrapper[4663]: I1211 10:30:58.978301 4663 scope.go:117] "RemoveContainer" containerID="382d392f257dfe3beb4d074eda1a9e7abde194b05242b5cd34cc6d03e9891229"
Dec 11 10:30:58 crc kubenswrapper[4663]: I1211 10:30:58.986040 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-pwwzk"]
Dec 11 10:30:58 crc kubenswrapper[4663]: I1211 10:30:58.990147 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-pwwzk"]
Dec 11 10:30:59 crc kubenswrapper[4663]: I1211 10:30:59.005944 4663 scope.go:117] "RemoveContainer" containerID="2d3a0f9d10a4a0987dd7e95f5ddffc81925c69d098cad6b90310a294b51044eb"
Dec 11 10:30:59 crc kubenswrapper[4663]: I1211 10:30:59.513875 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d887a7a6-8f87-4750-bb31-9eb324d69a11" path="/var/lib/kubelet/pods/d887a7a6-8f87-4750-bb31-9eb324d69a11/volumes"
Dec 11 10:31:01 crc kubenswrapper[4663]: I1211 10:31:01.958429 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-6c9b679f4-7xwnn" event={"ID":"322fecc6-f5c2-48c6-a014-91ca3c99b00f","Type":"ContainerStarted","Data":"ca77797e791273353e574e9b1df7aa26b1f8939d21cdb699d8d76be86be15e89"}
Dec 11 10:31:01 crc kubenswrapper[4663]: I1211 10:31:01.959372 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-6c9b679f4-7xwnn"
Dec 11 10:31:01 crc kubenswrapper[4663]: I1211 10:31:01.960551 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-f8979885c-gpvr8" event={"ID":"46b9f037-3a95-414e-9089-95dd401c7b2d","Type":"ContainerStarted","Data":"2ddcc80053b1da337c013b01a617423475badbde29a9aee4bbd8845aa7d94224"}
Dec 11 10:31:01 crc kubenswrapper[4663]: I1211 10:31:01.960607 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-f8979885c-gpvr8"
Dec 11 10:31:01 crc kubenswrapper[4663]: I1211 10:31:01.972792 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-6c9b679f4-7xwnn" podStartSLOduration=2.416120425 podStartE2EDuration="5.972780671s" podCreationTimestamp="2025-12-11 10:30:56 +0000 UTC" firstStartedPulling="2025-12-11 10:30:57.422124135 +0000 UTC m=+788.240053920" lastFinishedPulling="2025-12-11 10:31:00.978784381 +0000 UTC m=+791.796714166" observedRunningTime="2025-12-11 10:31:01.969700247 +0000 UTC m=+792.787630031" watchObservedRunningTime="2025-12-11 10:31:01.972780671 +0000 UTC m=+792.790710455"
Dec 11 10:31:01 crc kubenswrapper[4663]: I1211 10:31:01.989001 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-f8979885c-gpvr8" podStartSLOduration=2.164660947 podStartE2EDuration="5.988987606s" podCreationTimestamp="2025-12-11 10:30:56 +0000 UTC" firstStartedPulling="2025-12-11 10:30:57.136163093 +0000 UTC m=+787.954092876" lastFinishedPulling="2025-12-11 10:31:00.960489751 +0000 UTC m=+791.778419535" observedRunningTime="2025-12-11 10:31:01.985873528 +0000 UTC m=+792.803803302" watchObservedRunningTime="2025-12-11 10:31:01.988987606 +0000 UTC m=+792.806917389"
Dec 11 10:31:17 crc kubenswrapper[4663]: I1211 10:31:17.059451 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-6c9b679f4-7xwnn"
Dec 11 10:31:36 crc kubenswrapper[4663]: I1211 10:31:36.752661 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-f8979885c-gpvr8"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.285984 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7784b6fcf-8bt7k"]
Dec 11 10:31:37 crc kubenswrapper[4663]: E1211 10:31:37.286454 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d887a7a6-8f87-4750-bb31-9eb324d69a11" containerName="extract-content"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.286478 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="d887a7a6-8f87-4750-bb31-9eb324d69a11" containerName="extract-content"
Dec 11 10:31:37 crc kubenswrapper[4663]: E1211 10:31:37.286495 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d887a7a6-8f87-4750-bb31-9eb324d69a11" containerName="registry-server"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.286501 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="d887a7a6-8f87-4750-bb31-9eb324d69a11" containerName="registry-server"
Dec 11 10:31:37 crc kubenswrapper[4663]: E1211 10:31:37.286511 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d887a7a6-8f87-4750-bb31-9eb324d69a11" containerName="extract-utilities"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.286518 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="d887a7a6-8f87-4750-bb31-9eb324d69a11" containerName="extract-utilities"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.286624 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="d887a7a6-8f87-4750-bb31-9eb324d69a11" containerName="registry-server"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.287233 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-8bt7k"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.289468 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-cj69x"]
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.289775 4663 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-tbwf8"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.289875 4663 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.291617 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-cj69x"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.294260 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7784b6fcf-8bt7k"]
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.296363 4663 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.296536 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.348053 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-rvzrq"]
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.348861 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-rvzrq"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.350132 4663 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.351582 4663 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.351709 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.352312 4663 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-2qcvs"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.362975 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-5bddd4b946-62rrw"]
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.363761 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-5bddd4b946-62rrw"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.365916 4663 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.378166 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5bddd4b946-62rrw"]
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.429607 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/d6e35eff-30e8-40c3-a7bd-62f87d484142-metallb-excludel2\") pod \"speaker-rvzrq\" (UID: \"d6e35eff-30e8-40c3-a7bd-62f87d484142\") " pod="metallb-system/speaker-rvzrq"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.429654 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-chx4p\" (UniqueName: \"kubernetes.io/projected/0169573a-ebd0-4a44-9181-5d156081599f-kube-api-access-chx4p\") pod \"frr-k8s-cj69x\" (UID: \"0169573a-ebd0-4a44-9181-5d156081599f\") " pod="metallb-system/frr-k8s-cj69x"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.429736 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9jgsh\" (UniqueName: \"kubernetes.io/projected/8f37f2bb-6cee-40a8-a2d2-366b8ded3853-kube-api-access-9jgsh\") pod \"frr-k8s-webhook-server-7784b6fcf-8bt7k\" (UID: \"8f37f2bb-6cee-40a8-a2d2-366b8ded3853\") " pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-8bt7k"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.429789 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mkmmd\" (UniqueName: \"kubernetes.io/projected/d6e35eff-30e8-40c3-a7bd-62f87d484142-kube-api-access-mkmmd\") pod \"speaker-rvzrq\" (UID: \"d6e35eff-30e8-40c3-a7bd-62f87d484142\") " pod="metallb-system/speaker-rvzrq"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.429813 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/d6e35eff-30e8-40c3-a7bd-62f87d484142-memberlist\") pod \"speaker-rvzrq\" (UID: \"d6e35eff-30e8-40c3-a7bd-62f87d484142\") " pod="metallb-system/speaker-rvzrq"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.429843 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0169573a-ebd0-4a44-9181-5d156081599f-metrics-certs\") pod \"frr-k8s-cj69x\" (UID: \"0169573a-ebd0-4a44-9181-5d156081599f\") " pod="metallb-system/frr-k8s-cj69x"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.429870 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/0169573a-ebd0-4a44-9181-5d156081599f-frr-sockets\") pod \"frr-k8s-cj69x\" (UID: \"0169573a-ebd0-4a44-9181-5d156081599f\") " pod="metallb-system/frr-k8s-cj69x"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.429890 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/0169573a-ebd0-4a44-9181-5d156081599f-frr-conf\") pod \"frr-k8s-cj69x\" (UID: \"0169573a-ebd0-4a44-9181-5d156081599f\") " pod="metallb-system/frr-k8s-cj69x"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.429908 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/0169573a-ebd0-4a44-9181-5d156081599f-frr-startup\") pod \"frr-k8s-cj69x\" (UID: \"0169573a-ebd0-4a44-9181-5d156081599f\") " pod="metallb-system/frr-k8s-cj69x"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.429926 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/0169573a-ebd0-4a44-9181-5d156081599f-reloader\") pod \"frr-k8s-cj69x\" (UID: \"0169573a-ebd0-4a44-9181-5d156081599f\") " pod="metallb-system/frr-k8s-cj69x"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.429947 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d6e35eff-30e8-40c3-a7bd-62f87d484142-metrics-certs\") pod \"speaker-rvzrq\" (UID: \"d6e35eff-30e8-40c3-a7bd-62f87d484142\") " pod="metallb-system/speaker-rvzrq"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.430065 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/0169573a-ebd0-4a44-9181-5d156081599f-metrics\") pod \"frr-k8s-cj69x\" (UID: \"0169573a-ebd0-4a44-9181-5d156081599f\") " pod="metallb-system/frr-k8s-cj69x"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.430144 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8f37f2bb-6cee-40a8-a2d2-366b8ded3853-cert\") pod \"frr-k8s-webhook-server-7784b6fcf-8bt7k\" (UID: \"8f37f2bb-6cee-40a8-a2d2-366b8ded3853\") " pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-8bt7k"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.531556 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/d6e35eff-30e8-40c3-a7bd-62f87d484142-memberlist\") pod \"speaker-rvzrq\" (UID: \"d6e35eff-30e8-40c3-a7bd-62f87d484142\") " pod="metallb-system/speaker-rvzrq"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.531607 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0169573a-ebd0-4a44-9181-5d156081599f-metrics-certs\") pod \"frr-k8s-cj69x\" (UID: \"0169573a-ebd0-4a44-9181-5d156081599f\") " pod="metallb-system/frr-k8s-cj69x"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.531626 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/0169573a-ebd0-4a44-9181-5d156081599f-frr-sockets\") pod \"frr-k8s-cj69x\" (UID: \"0169573a-ebd0-4a44-9181-5d156081599f\") " pod="metallb-system/frr-k8s-cj69x"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.531650 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/0169573a-ebd0-4a44-9181-5d156081599f-frr-conf\") pod \"frr-k8s-cj69x\" (UID: \"0169573a-ebd0-4a44-9181-5d156081599f\") " pod="metallb-system/frr-k8s-cj69x"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.531668 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/0169573a-ebd0-4a44-9181-5d156081599f-frr-startup\") pod \"frr-k8s-cj69x\" (UID: \"0169573a-ebd0-4a44-9181-5d156081599f\") " pod="metallb-system/frr-k8s-cj69x"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.531690 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/0169573a-ebd0-4a44-9181-5d156081599f-reloader\") pod \"frr-k8s-cj69x\" (UID: \"0169573a-ebd0-4a44-9181-5d156081599f\") " pod="metallb-system/frr-k8s-cj69x"
Dec 11 10:31:37 crc kubenswrapper[4663]: E1211 10:31:37.531753 4663 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found
Dec 11 10:31:37 crc kubenswrapper[4663]: E1211 10:31:37.531824 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d6e35eff-30e8-40c3-a7bd-62f87d484142-memberlist podName:d6e35eff-30e8-40c3-a7bd-62f87d484142 nodeName:}" failed. No retries permitted until 2025-12-11 10:31:38.031806021 +0000 UTC m=+828.849735805 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/d6e35eff-30e8-40c3-a7bd-62f87d484142-memberlist") pod "speaker-rvzrq" (UID: "d6e35eff-30e8-40c3-a7bd-62f87d484142") : secret "metallb-memberlist" not found
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.532366 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/0169573a-ebd0-4a44-9181-5d156081599f-reloader\") pod \"frr-k8s-cj69x\" (UID: \"0169573a-ebd0-4a44-9181-5d156081599f\") " pod="metallb-system/frr-k8s-cj69x"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.532387 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5m7nz\" (UniqueName: \"kubernetes.io/projected/505c55d2-998b-4df8-bd05-f129b5f8a806-kube-api-access-5m7nz\") pod \"controller-5bddd4b946-62rrw\" (UID: \"505c55d2-998b-4df8-bd05-f129b5f8a806\") " pod="metallb-system/controller-5bddd4b946-62rrw"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.532432 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d6e35eff-30e8-40c3-a7bd-62f87d484142-metrics-certs\") pod \"speaker-rvzrq\" (UID: \"d6e35eff-30e8-40c3-a7bd-62f87d484142\") " pod="metallb-system/speaker-rvzrq"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.532458 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/505c55d2-998b-4df8-bd05-f129b5f8a806-cert\") pod \"controller-5bddd4b946-62rrw\" (UID: \"505c55d2-998b-4df8-bd05-f129b5f8a806\") " pod="metallb-system/controller-5bddd4b946-62rrw"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.532475 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/505c55d2-998b-4df8-bd05-f129b5f8a806-metrics-certs\") pod \"controller-5bddd4b946-62rrw\" (UID: \"505c55d2-998b-4df8-bd05-f129b5f8a806\") " pod="metallb-system/controller-5bddd4b946-62rrw"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.532582 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/0169573a-ebd0-4a44-9181-5d156081599f-metrics\") pod \"frr-k8s-cj69x\" (UID: \"0169573a-ebd0-4a44-9181-5d156081599f\") " pod="metallb-system/frr-k8s-cj69x"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.532626 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/0169573a-ebd0-4a44-9181-5d156081599f-frr-conf\") pod \"frr-k8s-cj69x\" (UID: \"0169573a-ebd0-4a44-9181-5d156081599f\") " pod="metallb-system/frr-k8s-cj69x"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.532642 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8f37f2bb-6cee-40a8-a2d2-366b8ded3853-cert\") pod \"frr-k8s-webhook-server-7784b6fcf-8bt7k\" (UID: \"8f37f2bb-6cee-40a8-a2d2-366b8ded3853\") " pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-8bt7k"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.532681 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/d6e35eff-30e8-40c3-a7bd-62f87d484142-metallb-excludel2\") pod \"speaker-rvzrq\" (UID: \"d6e35eff-30e8-40c3-a7bd-62f87d484142\") " pod="metallb-system/speaker-rvzrq"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.532698 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-chx4p\" (UniqueName: \"kubernetes.io/projected/0169573a-ebd0-4a44-9181-5d156081599f-kube-api-access-chx4p\") pod \"frr-k8s-cj69x\" (UID: \"0169573a-ebd0-4a44-9181-5d156081599f\") " pod="metallb-system/frr-k8s-cj69x"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.532722 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9jgsh\" (UniqueName: \"kubernetes.io/projected/8f37f2bb-6cee-40a8-a2d2-366b8ded3853-kube-api-access-9jgsh\") pod \"frr-k8s-webhook-server-7784b6fcf-8bt7k\" (UID: \"8f37f2bb-6cee-40a8-a2d2-366b8ded3853\") " pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-8bt7k"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.532749 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mkmmd\" (UniqueName: \"kubernetes.io/projected/d6e35eff-30e8-40c3-a7bd-62f87d484142-kube-api-access-mkmmd\") pod \"speaker-rvzrq\" (UID: \"d6e35eff-30e8-40c3-a7bd-62f87d484142\") " pod="metallb-system/speaker-rvzrq"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.532852 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/0169573a-ebd0-4a44-9181-5d156081599f-metrics\") pod \"frr-k8s-cj69x\" (UID: \"0169573a-ebd0-4a44-9181-5d156081599f\") " pod="metallb-system/frr-k8s-cj69x"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.533092 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/0169573a-ebd0-4a44-9181-5d156081599f-frr-sockets\") pod \"frr-k8s-cj69x\" (UID: \"0169573a-ebd0-4a44-9181-5d156081599f\") " pod="metallb-system/frr-k8s-cj69x"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.533331 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/d6e35eff-30e8-40c3-a7bd-62f87d484142-metallb-excludel2\") pod \"speaker-rvzrq\" (UID: \"d6e35eff-30e8-40c3-a7bd-62f87d484142\") " pod="metallb-system/speaker-rvzrq"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.533485 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/0169573a-ebd0-4a44-9181-5d156081599f-frr-startup\") pod \"frr-k8s-cj69x\" (UID: \"0169573a-ebd0-4a44-9181-5d156081599f\") " pod="metallb-system/frr-k8s-cj69x"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.536595 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d6e35eff-30e8-40c3-a7bd-62f87d484142-metrics-certs\") pod \"speaker-rvzrq\" (UID: \"d6e35eff-30e8-40c3-a7bd-62f87d484142\") " pod="metallb-system/speaker-rvzrq"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.536768 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8f37f2bb-6cee-40a8-a2d2-366b8ded3853-cert\") pod \"frr-k8s-webhook-server-7784b6fcf-8bt7k\" (UID: \"8f37f2bb-6cee-40a8-a2d2-366b8ded3853\") " pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-8bt7k"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.537353 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0169573a-ebd0-4a44-9181-5d156081599f-metrics-certs\") pod \"frr-k8s-cj69x\" (UID: \"0169573a-ebd0-4a44-9181-5d156081599f\") " pod="metallb-system/frr-k8s-cj69x"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.545308 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mkmmd\" (UniqueName: \"kubernetes.io/projected/d6e35eff-30e8-40c3-a7bd-62f87d484142-kube-api-access-mkmmd\") pod \"speaker-rvzrq\" (UID: \"d6e35eff-30e8-40c3-a7bd-62f87d484142\") " pod="metallb-system/speaker-rvzrq"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.546464 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-chx4p\" (UniqueName: \"kubernetes.io/projected/0169573a-ebd0-4a44-9181-5d156081599f-kube-api-access-chx4p\") pod \"frr-k8s-cj69x\" (UID: \"0169573a-ebd0-4a44-9181-5d156081599f\") " pod="metallb-system/frr-k8s-cj69x"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.547274 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9jgsh\" (UniqueName: \"kubernetes.io/projected/8f37f2bb-6cee-40a8-a2d2-366b8ded3853-kube-api-access-9jgsh\") pod \"frr-k8s-webhook-server-7784b6fcf-8bt7k\" (UID: \"8f37f2bb-6cee-40a8-a2d2-366b8ded3853\") " pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-8bt7k"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.609808 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-8bt7k"
Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.617852 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-cj69x"
Need to start a new one" pod="metallb-system/frr-k8s-cj69x" Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.634665 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5m7nz\" (UniqueName: \"kubernetes.io/projected/505c55d2-998b-4df8-bd05-f129b5f8a806-kube-api-access-5m7nz\") pod \"controller-5bddd4b946-62rrw\" (UID: \"505c55d2-998b-4df8-bd05-f129b5f8a806\") " pod="metallb-system/controller-5bddd4b946-62rrw" Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.634708 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/505c55d2-998b-4df8-bd05-f129b5f8a806-cert\") pod \"controller-5bddd4b946-62rrw\" (UID: \"505c55d2-998b-4df8-bd05-f129b5f8a806\") " pod="metallb-system/controller-5bddd4b946-62rrw" Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.634726 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/505c55d2-998b-4df8-bd05-f129b5f8a806-metrics-certs\") pod \"controller-5bddd4b946-62rrw\" (UID: \"505c55d2-998b-4df8-bd05-f129b5f8a806\") " pod="metallb-system/controller-5bddd4b946-62rrw" Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.638578 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/505c55d2-998b-4df8-bd05-f129b5f8a806-metrics-certs\") pod \"controller-5bddd4b946-62rrw\" (UID: \"505c55d2-998b-4df8-bd05-f129b5f8a806\") " pod="metallb-system/controller-5bddd4b946-62rrw" Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.638626 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/505c55d2-998b-4df8-bd05-f129b5f8a806-cert\") pod \"controller-5bddd4b946-62rrw\" (UID: \"505c55d2-998b-4df8-bd05-f129b5f8a806\") " pod="metallb-system/controller-5bddd4b946-62rrw" Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.650911 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5m7nz\" (UniqueName: \"kubernetes.io/projected/505c55d2-998b-4df8-bd05-f129b5f8a806-kube-api-access-5m7nz\") pod \"controller-5bddd4b946-62rrw\" (UID: \"505c55d2-998b-4df8-bd05-f129b5f8a806\") " pod="metallb-system/controller-5bddd4b946-62rrw" Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.674798 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-5bddd4b946-62rrw" Dec 11 10:31:37 crc kubenswrapper[4663]: I1211 10:31:37.964993 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7784b6fcf-8bt7k"] Dec 11 10:31:37 crc kubenswrapper[4663]: W1211 10:31:37.974405 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8f37f2bb_6cee_40a8_a2d2_366b8ded3853.slice/crio-aa43510c0c32571aa1129be9762316b38560e44b71a5b2f07aa66c25f139fbcb WatchSource:0}: Error finding container aa43510c0c32571aa1129be9762316b38560e44b71a5b2f07aa66c25f139fbcb: Status 404 returned error can't find the container with id aa43510c0c32571aa1129be9762316b38560e44b71a5b2f07aa66c25f139fbcb Dec 11 10:31:38 crc kubenswrapper[4663]: I1211 10:31:38.024375 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5bddd4b946-62rrw"] Dec 11 10:31:38 crc kubenswrapper[4663]: W1211 10:31:38.027051 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod505c55d2_998b_4df8_bd05_f129b5f8a806.slice/crio-deeccd7737c754212934143139e27888fbed89d69b724747f0b019e33ea7fd0b WatchSource:0}: Error finding container deeccd7737c754212934143139e27888fbed89d69b724747f0b019e33ea7fd0b: Status 404 returned error can't find the container with id deeccd7737c754212934143139e27888fbed89d69b724747f0b019e33ea7fd0b Dec 11 10:31:38 crc kubenswrapper[4663]: I1211 10:31:38.039351 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/d6e35eff-30e8-40c3-a7bd-62f87d484142-memberlist\") pod \"speaker-rvzrq\" (UID: \"d6e35eff-30e8-40c3-a7bd-62f87d484142\") " pod="metallb-system/speaker-rvzrq" Dec 11 10:31:38 crc kubenswrapper[4663]: E1211 10:31:38.039493 4663 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 11 10:31:38 crc kubenswrapper[4663]: E1211 10:31:38.039549 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d6e35eff-30e8-40c3-a7bd-62f87d484142-memberlist podName:d6e35eff-30e8-40c3-a7bd-62f87d484142 nodeName:}" failed. No retries permitted until 2025-12-11 10:31:39.039535516 +0000 UTC m=+829.857465299 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/d6e35eff-30e8-40c3-a7bd-62f87d484142-memberlist") pod "speaker-rvzrq" (UID: "d6e35eff-30e8-40c3-a7bd-62f87d484142") : secret "metallb-memberlist" not found Dec 11 10:31:38 crc kubenswrapper[4663]: I1211 10:31:38.119527 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5bddd4b946-62rrw" event={"ID":"505c55d2-998b-4df8-bd05-f129b5f8a806","Type":"ContainerStarted","Data":"d47e5ff211d4b0cb597db9106cba9db216526d97736c2a6155aaab2d713563c0"} Dec 11 10:31:38 crc kubenswrapper[4663]: I1211 10:31:38.119564 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5bddd4b946-62rrw" event={"ID":"505c55d2-998b-4df8-bd05-f129b5f8a806","Type":"ContainerStarted","Data":"deeccd7737c754212934143139e27888fbed89d69b724747f0b019e33ea7fd0b"} Dec 11 10:31:38 crc kubenswrapper[4663]: I1211 10:31:38.120723 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-8bt7k" event={"ID":"8f37f2bb-6cee-40a8-a2d2-366b8ded3853","Type":"ContainerStarted","Data":"aa43510c0c32571aa1129be9762316b38560e44b71a5b2f07aa66c25f139fbcb"} Dec 11 10:31:38 crc kubenswrapper[4663]: I1211 10:31:38.121649 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-cj69x" event={"ID":"0169573a-ebd0-4a44-9181-5d156081599f","Type":"ContainerStarted","Data":"ee21c68ddb89c50abd37a8c64ddd778b79e3e30c22bc8fcdd99e4fb2c67e0b90"} Dec 11 10:31:39 crc kubenswrapper[4663]: I1211 10:31:39.050679 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/d6e35eff-30e8-40c3-a7bd-62f87d484142-memberlist\") pod \"speaker-rvzrq\" (UID: \"d6e35eff-30e8-40c3-a7bd-62f87d484142\") " pod="metallb-system/speaker-rvzrq" Dec 11 10:31:39 crc kubenswrapper[4663]: I1211 10:31:39.055370 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/d6e35eff-30e8-40c3-a7bd-62f87d484142-memberlist\") pod \"speaker-rvzrq\" (UID: \"d6e35eff-30e8-40c3-a7bd-62f87d484142\") " pod="metallb-system/speaker-rvzrq" Dec 11 10:31:39 crc kubenswrapper[4663]: I1211 10:31:39.128502 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5bddd4b946-62rrw" event={"ID":"505c55d2-998b-4df8-bd05-f129b5f8a806","Type":"ContainerStarted","Data":"4a20c48a4b01b5a1b553b419a7eddcd1e39cf680f29aa20b822ec9a67bf18313"} Dec 11 10:31:39 crc kubenswrapper[4663]: I1211 10:31:39.128613 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-5bddd4b946-62rrw" Dec 11 10:31:39 crc kubenswrapper[4663]: I1211 10:31:39.142476 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-5bddd4b946-62rrw" podStartSLOduration=2.142463829 podStartE2EDuration="2.142463829s" podCreationTimestamp="2025-12-11 10:31:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:31:39.140006868 +0000 UTC m=+829.957936651" watchObservedRunningTime="2025-12-11 10:31:39.142463829 +0000 UTC m=+829.960393613" Dec 11 10:31:39 crc kubenswrapper[4663]: I1211 10:31:39.159118 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-rvzrq" Dec 11 10:31:39 crc kubenswrapper[4663]: W1211 10:31:39.173647 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd6e35eff_30e8_40c3_a7bd_62f87d484142.slice/crio-8a070c990608a93da68492d52d1097d6a6c9975639d34d907533bf0b82d1a1cb WatchSource:0}: Error finding container 8a070c990608a93da68492d52d1097d6a6c9975639d34d907533bf0b82d1a1cb: Status 404 returned error can't find the container with id 8a070c990608a93da68492d52d1097d6a6c9975639d34d907533bf0b82d1a1cb Dec 11 10:31:40 crc kubenswrapper[4663]: I1211 10:31:40.139562 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-rvzrq" event={"ID":"d6e35eff-30e8-40c3-a7bd-62f87d484142","Type":"ContainerStarted","Data":"d6f185b328d30c5d55aaac28662412c2e464730eb3ebf853e00d57c3a4f4180c"} Dec 11 10:31:40 crc kubenswrapper[4663]: I1211 10:31:40.139792 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-rvzrq" event={"ID":"d6e35eff-30e8-40c3-a7bd-62f87d484142","Type":"ContainerStarted","Data":"ce5bae49a3963f2155cbf9f2adef87d3c884f87feb337c8e20aca12a8bad8ef3"} Dec 11 10:31:40 crc kubenswrapper[4663]: I1211 10:31:40.139807 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-rvzrq" event={"ID":"d6e35eff-30e8-40c3-a7bd-62f87d484142","Type":"ContainerStarted","Data":"8a070c990608a93da68492d52d1097d6a6c9975639d34d907533bf0b82d1a1cb"} Dec 11 10:31:40 crc kubenswrapper[4663]: I1211 10:31:40.140215 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-rvzrq" Dec 11 10:31:40 crc kubenswrapper[4663]: I1211 10:31:40.153571 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-rvzrq" podStartSLOduration=3.153557687 podStartE2EDuration="3.153557687s" podCreationTimestamp="2025-12-11 10:31:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:31:40.149820909 +0000 UTC m=+830.967750693" watchObservedRunningTime="2025-12-11 10:31:40.153557687 +0000 UTC m=+830.971487471" Dec 11 10:31:44 crc kubenswrapper[4663]: I1211 10:31:44.170039 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-8bt7k" event={"ID":"8f37f2bb-6cee-40a8-a2d2-366b8ded3853","Type":"ContainerStarted","Data":"291aa8a132f5928a89318d477a7dac84ff1a0d5501f0e974e8b3f0e6a6863a3e"} Dec 11 10:31:44 crc kubenswrapper[4663]: I1211 10:31:44.170466 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-8bt7k" Dec 11 10:31:44 crc kubenswrapper[4663]: I1211 10:31:44.171668 4663 generic.go:334] "Generic (PLEG): container finished" podID="0169573a-ebd0-4a44-9181-5d156081599f" containerID="48d7c19fc2eb7aa2539b6c35928bb321813fdfdab44c7e447786de016c43fdd5" exitCode=0 Dec 11 10:31:44 crc kubenswrapper[4663]: I1211 10:31:44.171696 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-cj69x" event={"ID":"0169573a-ebd0-4a44-9181-5d156081599f","Type":"ContainerDied","Data":"48d7c19fc2eb7aa2539b6c35928bb321813fdfdab44c7e447786de016c43fdd5"} Dec 11 10:31:44 crc kubenswrapper[4663]: I1211 10:31:44.182985 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-8bt7k" podStartSLOduration=1.537214846 
podStartE2EDuration="7.182977393s" podCreationTimestamp="2025-12-11 10:31:37 +0000 UTC" firstStartedPulling="2025-12-11 10:31:37.97609603 +0000 UTC m=+828.794025814" lastFinishedPulling="2025-12-11 10:31:43.621858576 +0000 UTC m=+834.439788361" observedRunningTime="2025-12-11 10:31:44.181456853 +0000 UTC m=+834.999386637" watchObservedRunningTime="2025-12-11 10:31:44.182977393 +0000 UTC m=+835.000907176" Dec 11 10:31:45 crc kubenswrapper[4663]: I1211 10:31:45.178672 4663 generic.go:334] "Generic (PLEG): container finished" podID="0169573a-ebd0-4a44-9181-5d156081599f" containerID="a721e422e0dcdbd830f11697545ea81b3a6dc6bf5450f55b9e4b7fc2eb36902a" exitCode=0 Dec 11 10:31:45 crc kubenswrapper[4663]: I1211 10:31:45.178725 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-cj69x" event={"ID":"0169573a-ebd0-4a44-9181-5d156081599f","Type":"ContainerDied","Data":"a721e422e0dcdbd830f11697545ea81b3a6dc6bf5450f55b9e4b7fc2eb36902a"} Dec 11 10:31:46 crc kubenswrapper[4663]: I1211 10:31:46.185660 4663 generic.go:334] "Generic (PLEG): container finished" podID="0169573a-ebd0-4a44-9181-5d156081599f" containerID="5895d5c31a8ce42a334d0d130d47b5394a52f579be7c8f0acc5c5406f7d095c6" exitCode=0 Dec 11 10:31:46 crc kubenswrapper[4663]: I1211 10:31:46.185697 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-cj69x" event={"ID":"0169573a-ebd0-4a44-9181-5d156081599f","Type":"ContainerDied","Data":"5895d5c31a8ce42a334d0d130d47b5394a52f579be7c8f0acc5c5406f7d095c6"} Dec 11 10:31:47 crc kubenswrapper[4663]: I1211 10:31:47.193845 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-cj69x" event={"ID":"0169573a-ebd0-4a44-9181-5d156081599f","Type":"ContainerStarted","Data":"779b916bcde06576b8aac8a9767df474bc5a44b858f357ec7b6bdbf6728c4157"} Dec 11 10:31:47 crc kubenswrapper[4663]: I1211 10:31:47.193881 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-cj69x" event={"ID":"0169573a-ebd0-4a44-9181-5d156081599f","Type":"ContainerStarted","Data":"041bdc0b4302350d6ea7dc0149b7f347abd0b2d8e6906285fc9e431a2b06cbae"} Dec 11 10:31:47 crc kubenswrapper[4663]: I1211 10:31:47.193891 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-cj69x" event={"ID":"0169573a-ebd0-4a44-9181-5d156081599f","Type":"ContainerStarted","Data":"5a68e51a07fecda13a9e87fd1916aad5eb3f6e768c2fee16d0c230e337be4e43"} Dec 11 10:31:47 crc kubenswrapper[4663]: I1211 10:31:47.193898 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-cj69x" event={"ID":"0169573a-ebd0-4a44-9181-5d156081599f","Type":"ContainerStarted","Data":"66380711e5e73c485da9fa5ce8112e850bf17e4f9abfa0b21e9bf07104881b03"} Dec 11 10:31:47 crc kubenswrapper[4663]: I1211 10:31:47.193905 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-cj69x" event={"ID":"0169573a-ebd0-4a44-9181-5d156081599f","Type":"ContainerStarted","Data":"f1123ddf57e8a189e695641f7c86a15d7b52ba698790c2fc8711e3a6034cc9af"} Dec 11 10:31:47 crc kubenswrapper[4663]: I1211 10:31:47.193912 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-cj69x" event={"ID":"0169573a-ebd0-4a44-9181-5d156081599f","Type":"ContainerStarted","Data":"b7a4042e4920d5c473179d7e74ac9ce29bd0d76122a23f7d8bac2db2a2a12c79"} Dec 11 10:31:47 crc kubenswrapper[4663]: I1211 10:31:47.194215 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-cj69x" Dec 11 10:31:47 crc 
kubenswrapper[4663]: I1211 10:31:47.208826 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-cj69x" podStartSLOduration=4.321864383 podStartE2EDuration="10.20881629s" podCreationTimestamp="2025-12-11 10:31:37 +0000 UTC" firstStartedPulling="2025-12-11 10:31:37.739196087 +0000 UTC m=+828.557125871" lastFinishedPulling="2025-12-11 10:31:43.626147994 +0000 UTC m=+834.444077778" observedRunningTime="2025-12-11 10:31:47.207920676 +0000 UTC m=+838.025850470" watchObservedRunningTime="2025-12-11 10:31:47.20881629 +0000 UTC m=+838.026746075" Dec 11 10:31:47 crc kubenswrapper[4663]: I1211 10:31:47.618263 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-cj69x" Dec 11 10:31:47 crc kubenswrapper[4663]: I1211 10:31:47.647778 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-cj69x" Dec 11 10:31:49 crc kubenswrapper[4663]: I1211 10:31:49.162230 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-rvzrq" Dec 11 10:31:51 crc kubenswrapper[4663]: I1211 10:31:51.518795 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-4xjs4"] Dec 11 10:31:51 crc kubenswrapper[4663]: I1211 10:31:51.519564 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-4xjs4" Dec 11 10:31:51 crc kubenswrapper[4663]: I1211 10:31:51.520605 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-bkkww" Dec 11 10:31:51 crc kubenswrapper[4663]: I1211 10:31:51.521013 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Dec 11 10:31:51 crc kubenswrapper[4663]: I1211 10:31:51.523790 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Dec 11 10:31:51 crc kubenswrapper[4663]: I1211 10:31:51.527316 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-4xjs4"] Dec 11 10:31:51 crc kubenswrapper[4663]: I1211 10:31:51.599210 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f8mkg\" (UniqueName: \"kubernetes.io/projected/df732853-d2fb-4c73-a747-6c0a80a3c050-kube-api-access-f8mkg\") pod \"openstack-operator-index-4xjs4\" (UID: \"df732853-d2fb-4c73-a747-6c0a80a3c050\") " pod="openstack-operators/openstack-operator-index-4xjs4" Dec 11 10:31:51 crc kubenswrapper[4663]: I1211 10:31:51.700716 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f8mkg\" (UniqueName: \"kubernetes.io/projected/df732853-d2fb-4c73-a747-6c0a80a3c050-kube-api-access-f8mkg\") pod \"openstack-operator-index-4xjs4\" (UID: \"df732853-d2fb-4c73-a747-6c0a80a3c050\") " pod="openstack-operators/openstack-operator-index-4xjs4" Dec 11 10:31:51 crc kubenswrapper[4663]: I1211 10:31:51.715439 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f8mkg\" (UniqueName: \"kubernetes.io/projected/df732853-d2fb-4c73-a747-6c0a80a3c050-kube-api-access-f8mkg\") pod \"openstack-operator-index-4xjs4\" (UID: \"df732853-d2fb-4c73-a747-6c0a80a3c050\") " pod="openstack-operators/openstack-operator-index-4xjs4" Dec 11 10:31:51 crc kubenswrapper[4663]: I1211 10:31:51.837343 4663 util.go:30] "No 
Dec 11 10:31:52 crc kubenswrapper[4663]: I1211 10:31:52.172846 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-4xjs4"]
Dec 11 10:31:52 crc kubenswrapper[4663]: W1211 10:31:52.176393 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddf732853_d2fb_4c73_a747_6c0a80a3c050.slice/crio-dd031eb0630e54e1446417612c57b273786f2c1c71fbaefa0cf5635347637fdc WatchSource:0}: Error finding container dd031eb0630e54e1446417612c57b273786f2c1c71fbaefa0cf5635347637fdc: Status 404 returned error can't find the container with id dd031eb0630e54e1446417612c57b273786f2c1c71fbaefa0cf5635347637fdc
Dec 11 10:31:52 crc kubenswrapper[4663]: I1211 10:31:52.217107 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-4xjs4" event={"ID":"df732853-d2fb-4c73-a747-6c0a80a3c050","Type":"ContainerStarted","Data":"dd031eb0630e54e1446417612c57b273786f2c1c71fbaefa0cf5635347637fdc"}
Dec 11 10:31:54 crc kubenswrapper[4663]: I1211 10:31:54.905679 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-4xjs4"]
Dec 11 10:31:55 crc kubenswrapper[4663]: I1211 10:31:55.519131 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-s4rfx"]
Dec 11 10:31:55 crc kubenswrapper[4663]: I1211 10:31:55.519861 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-s4rfx"
Dec 11 10:31:55 crc kubenswrapper[4663]: I1211 10:31:55.523556 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-s4rfx"]
Dec 11 10:31:55 crc kubenswrapper[4663]: I1211 10:31:55.647240 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lrptx\" (UniqueName: \"kubernetes.io/projected/4fa7b43e-65e7-4338-a694-8a7042d9fbcb-kube-api-access-lrptx\") pod \"openstack-operator-index-s4rfx\" (UID: \"4fa7b43e-65e7-4338-a694-8a7042d9fbcb\") " pod="openstack-operators/openstack-operator-index-s4rfx"
Dec 11 10:31:55 crc kubenswrapper[4663]: I1211 10:31:55.748589 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lrptx\" (UniqueName: \"kubernetes.io/projected/4fa7b43e-65e7-4338-a694-8a7042d9fbcb-kube-api-access-lrptx\") pod \"openstack-operator-index-s4rfx\" (UID: \"4fa7b43e-65e7-4338-a694-8a7042d9fbcb\") " pod="openstack-operators/openstack-operator-index-s4rfx"
Dec 11 10:31:55 crc kubenswrapper[4663]: I1211 10:31:55.764354 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lrptx\" (UniqueName: \"kubernetes.io/projected/4fa7b43e-65e7-4338-a694-8a7042d9fbcb-kube-api-access-lrptx\") pod \"openstack-operator-index-s4rfx\" (UID: \"4fa7b43e-65e7-4338-a694-8a7042d9fbcb\") " pod="openstack-operators/openstack-operator-index-s4rfx"
Dec 11 10:31:55 crc kubenswrapper[4663]: I1211 10:31:55.837014 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-s4rfx"
Dec 11 10:31:56 crc kubenswrapper[4663]: I1211 10:31:56.171438 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-s4rfx"]
Dec 11 10:31:56 crc kubenswrapper[4663]: W1211 10:31:56.174446 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4fa7b43e_65e7_4338_a694_8a7042d9fbcb.slice/crio-8013861076be72587ffdd87b0ce8ee2e310ca9290280ce590df5ecff55099dd6 WatchSource:0}: Error finding container 8013861076be72587ffdd87b0ce8ee2e310ca9290280ce590df5ecff55099dd6: Status 404 returned error can't find the container with id 8013861076be72587ffdd87b0ce8ee2e310ca9290280ce590df5ecff55099dd6
Dec 11 10:31:56 crc kubenswrapper[4663]: I1211 10:31:56.239868 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-s4rfx" event={"ID":"4fa7b43e-65e7-4338-a694-8a7042d9fbcb","Type":"ContainerStarted","Data":"8013861076be72587ffdd87b0ce8ee2e310ca9290280ce590df5ecff55099dd6"}
Dec 11 10:31:57 crc kubenswrapper[4663]: I1211 10:31:57.613631 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-8bt7k"
Dec 11 10:31:57 crc kubenswrapper[4663]: I1211 10:31:57.620350 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-cj69x"
Dec 11 10:31:57 crc kubenswrapper[4663]: I1211 10:31:57.678548 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-5bddd4b946-62rrw"
Dec 11 10:32:49 crc kubenswrapper[4663]: I1211 10:32:49.270800 4663 patch_prober.go:28] interesting pod/machine-config-daemon-mq524 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 11 10:32:49 crc kubenswrapper[4663]: I1211 10:32:49.271165 4663 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 11 10:33:19 crc kubenswrapper[4663]: I1211 10:33:19.271326 4663 patch_prober.go:28] interesting pod/machine-config-daemon-mq524 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 11 10:33:19 crc kubenswrapper[4663]: I1211 10:33:19.271673 4663 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 11 10:33:38 crc kubenswrapper[4663]: I1211 10:33:38.817191 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-jll2t"]
Dec 11 10:33:38 crc kubenswrapper[4663]: I1211 10:33:38.818505 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jll2t"
Dec 11 10:33:38 crc kubenswrapper[4663]: I1211 10:33:38.825747 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jll2t"]
Dec 11 10:33:38 crc kubenswrapper[4663]: I1211 10:33:38.874042 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/22983e2d-2e94-4539-b09d-655cfc7a5097-catalog-content\") pod \"redhat-marketplace-jll2t\" (UID: \"22983e2d-2e94-4539-b09d-655cfc7a5097\") " pod="openshift-marketplace/redhat-marketplace-jll2t"
Dec 11 10:33:38 crc kubenswrapper[4663]: I1211 10:33:38.874119 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/22983e2d-2e94-4539-b09d-655cfc7a5097-utilities\") pod \"redhat-marketplace-jll2t\" (UID: \"22983e2d-2e94-4539-b09d-655cfc7a5097\") " pod="openshift-marketplace/redhat-marketplace-jll2t"
Dec 11 10:33:38 crc kubenswrapper[4663]: I1211 10:33:38.874140 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvqpc\" (UniqueName: \"kubernetes.io/projected/22983e2d-2e94-4539-b09d-655cfc7a5097-kube-api-access-jvqpc\") pod \"redhat-marketplace-jll2t\" (UID: \"22983e2d-2e94-4539-b09d-655cfc7a5097\") " pod="openshift-marketplace/redhat-marketplace-jll2t"
Dec 11 10:33:38 crc kubenswrapper[4663]: I1211 10:33:38.974983 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvqpc\" (UniqueName: \"kubernetes.io/projected/22983e2d-2e94-4539-b09d-655cfc7a5097-kube-api-access-jvqpc\") pod \"redhat-marketplace-jll2t\" (UID: \"22983e2d-2e94-4539-b09d-655cfc7a5097\") " pod="openshift-marketplace/redhat-marketplace-jll2t"
Dec 11 10:33:38 crc kubenswrapper[4663]: I1211 10:33:38.975091 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/22983e2d-2e94-4539-b09d-655cfc7a5097-catalog-content\") pod \"redhat-marketplace-jll2t\" (UID: \"22983e2d-2e94-4539-b09d-655cfc7a5097\") " pod="openshift-marketplace/redhat-marketplace-jll2t"
Dec 11 10:33:38 crc kubenswrapper[4663]: I1211 10:33:38.975193 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/22983e2d-2e94-4539-b09d-655cfc7a5097-utilities\") pod \"redhat-marketplace-jll2t\" (UID: \"22983e2d-2e94-4539-b09d-655cfc7a5097\") " pod="openshift-marketplace/redhat-marketplace-jll2t"
Dec 11 10:33:38 crc kubenswrapper[4663]: I1211 10:33:38.975472 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/22983e2d-2e94-4539-b09d-655cfc7a5097-catalog-content\") pod \"redhat-marketplace-jll2t\" (UID: \"22983e2d-2e94-4539-b09d-655cfc7a5097\") " pod="openshift-marketplace/redhat-marketplace-jll2t"
Dec 11 10:33:38 crc kubenswrapper[4663]: I1211 10:33:38.975534 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/22983e2d-2e94-4539-b09d-655cfc7a5097-utilities\") pod \"redhat-marketplace-jll2t\" (UID: \"22983e2d-2e94-4539-b09d-655cfc7a5097\") " pod="openshift-marketplace/redhat-marketplace-jll2t"
Dec 11 10:33:38 crc kubenswrapper[4663]: I1211 10:33:38.989986 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvqpc\" (UniqueName: \"kubernetes.io/projected/22983e2d-2e94-4539-b09d-655cfc7a5097-kube-api-access-jvqpc\") pod \"redhat-marketplace-jll2t\" (UID: \"22983e2d-2e94-4539-b09d-655cfc7a5097\") " pod="openshift-marketplace/redhat-marketplace-jll2t"
Dec 11 10:33:39 crc kubenswrapper[4663]: I1211 10:33:39.131263 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jll2t"
Dec 11 10:33:39 crc kubenswrapper[4663]: I1211 10:33:39.522994 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jll2t"]
Dec 11 10:33:39 crc kubenswrapper[4663]: I1211 10:33:39.691611 4663 generic.go:334] "Generic (PLEG): container finished" podID="22983e2d-2e94-4539-b09d-655cfc7a5097" containerID="d82834971a4d77827ff35679175897329085bc4dbcbdff54bc3444613c5ba279" exitCode=0
Dec 11 10:33:39 crc kubenswrapper[4663]: I1211 10:33:39.691670 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jll2t" event={"ID":"22983e2d-2e94-4539-b09d-655cfc7a5097","Type":"ContainerDied","Data":"d82834971a4d77827ff35679175897329085bc4dbcbdff54bc3444613c5ba279"}
Dec 11 10:33:39 crc kubenswrapper[4663]: I1211 10:33:39.692089 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jll2t" event={"ID":"22983e2d-2e94-4539-b09d-655cfc7a5097","Type":"ContainerStarted","Data":"2d714e846d30a32b6264b8b755585ffd1ff52759bc7da73ba83d8bc6f6f3fb77"}
Dec 11 10:33:39 crc kubenswrapper[4663]: I1211 10:33:39.693264 4663 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 11 10:33:40 crc kubenswrapper[4663]: I1211 10:33:40.698683 4663 generic.go:334] "Generic (PLEG): container finished" podID="22983e2d-2e94-4539-b09d-655cfc7a5097" containerID="cb18d1b27f6aa136edf2501b81934145c6c13ac598e0af2352176506b9fe7c70" exitCode=0
Dec 11 10:33:40 crc kubenswrapper[4663]: I1211 10:33:40.698717 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jll2t" event={"ID":"22983e2d-2e94-4539-b09d-655cfc7a5097","Type":"ContainerDied","Data":"cb18d1b27f6aa136edf2501b81934145c6c13ac598e0af2352176506b9fe7c70"}
Dec 11 10:33:41 crc kubenswrapper[4663]: I1211 10:33:41.704888 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jll2t" event={"ID":"22983e2d-2e94-4539-b09d-655cfc7a5097","Type":"ContainerStarted","Data":"e4f83a2bb37aafa70b07ffa0b5f9d366c05ff4ca35ac95fb3947c63854d16195"}
Dec 11 10:33:41 crc kubenswrapper[4663]: I1211 10:33:41.718370 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-jll2t" podStartSLOduration=2.009381757 podStartE2EDuration="3.718358882s" podCreationTimestamp="2025-12-11 10:33:38 +0000 UTC" firstStartedPulling="2025-12-11 10:33:39.693033849 +0000 UTC m=+950.510963633" lastFinishedPulling="2025-12-11 10:33:41.402010974 +0000 UTC m=+952.219940758" observedRunningTime="2025-12-11 10:33:41.715821421 +0000 UTC m=+952.533751205" watchObservedRunningTime="2025-12-11 10:33:41.718358882 +0000 UTC m=+952.536288656"
Dec 11 10:33:49 crc kubenswrapper[4663]: I1211 10:33:49.131944 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-jll2t"
Dec 11 10:33:49 crc kubenswrapper[4663]: I1211 10:33:49.132272 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-jll2t"
Dec 11 10:33:49 crc kubenswrapper[4663]: I1211 10:33:49.158752 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-jll2t"
Dec 11 10:33:49 crc kubenswrapper[4663]: I1211 10:33:49.270769 4663 patch_prober.go:28] interesting pod/machine-config-daemon-mq524 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 11 10:33:49 crc kubenswrapper[4663]: I1211 10:33:49.270816 4663 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 11 10:33:49 crc kubenswrapper[4663]: I1211 10:33:49.270852 4663 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mq524"
Dec 11 10:33:49 crc kubenswrapper[4663]: I1211 10:33:49.271323 4663 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8c8254fc38982caf8a920e34b1f8132d5bd94723c9a1ecbeeb5eec5817ed54be"} pod="openshift-machine-config-operator/machine-config-daemon-mq524" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 11 10:33:49 crc kubenswrapper[4663]: I1211 10:33:49.271380 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13" containerName="machine-config-daemon" containerID="cri-o://8c8254fc38982caf8a920e34b1f8132d5bd94723c9a1ecbeeb5eec5817ed54be" gracePeriod=600
Dec 11 10:33:49 crc kubenswrapper[4663]: I1211 10:33:49.743288 4663 generic.go:334] "Generic (PLEG): container finished" podID="98941020-2764-490d-81db-9decd21aaa13" containerID="8c8254fc38982caf8a920e34b1f8132d5bd94723c9a1ecbeeb5eec5817ed54be" exitCode=0
Dec 11 10:33:49 crc kubenswrapper[4663]: I1211 10:33:49.743357 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" event={"ID":"98941020-2764-490d-81db-9decd21aaa13","Type":"ContainerDied","Data":"8c8254fc38982caf8a920e34b1f8132d5bd94723c9a1ecbeeb5eec5817ed54be"}
Dec 11 10:33:49 crc kubenswrapper[4663]: I1211 10:33:49.743679 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" event={"ID":"98941020-2764-490d-81db-9decd21aaa13","Type":"ContainerStarted","Data":"c43fdb9398afc1e364205f1d182147a1fb7e384c431a0fe48c53bca2dc7a3515"}
Dec 11 10:33:49 crc kubenswrapper[4663]: I1211 10:33:49.743696 4663 scope.go:117] "RemoveContainer" containerID="d6a383f638ba877808ee9ae4a0f108f5e0f431193b4b9388090fdfc37a56b27f"
Dec 11 10:33:49 crc kubenswrapper[4663]: I1211 10:33:49.775731 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-jll2t"
Dec 11 10:33:49 crc kubenswrapper[4663]: I1211 10:33:49.813657 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-jll2t"]
Dec 11 10:33:51 crc kubenswrapper[4663]:
I1211 10:33:51.754112 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-jll2t" podUID="22983e2d-2e94-4539-b09d-655cfc7a5097" containerName="registry-server" containerID="cri-o://e4f83a2bb37aafa70b07ffa0b5f9d366c05ff4ca35ac95fb3947c63854d16195" gracePeriod=2 Dec 11 10:33:52 crc kubenswrapper[4663]: I1211 10:33:52.055587 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jll2t" Dec 11 10:33:52 crc kubenswrapper[4663]: E1211 10:33:52.181353 4663 log.go:32] "PullImage from image service failed" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0: pinging container registry 38.129.56.99:5001: Get \"http://38.129.56.99:5001/v2/\": dial tcp 38.129.56.99:5001: i/o timeout" image="38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0" Dec 11 10:33:52 crc kubenswrapper[4663]: E1211 10:33:52.181401 4663 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0: pinging container registry 38.129.56.99:5001: Get \"http://38.129.56.99:5001/v2/\": dial tcp 38.129.56.99:5001: i/o timeout" image="38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0" Dec 11 10:33:52 crc kubenswrapper[4663]: E1211 10:33:52.181522 4663 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:registry-server,Image:38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:grpc,HostPort:0,ContainerPort:50051,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-f8mkg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe 
-addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:10,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-operator-index-4xjs4_openstack-operators(df732853-d2fb-4c73-a747-6c0a80a3c050): ErrImagePull: rpc error: code = DeadlineExceeded desc = initializing source docker://38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0: pinging container registry 38.129.56.99:5001: Get \"http://38.129.56.99:5001/v2/\": dial tcp 38.129.56.99:5001: i/o timeout" logger="UnhandledError" Dec 11 10:33:52 crc kubenswrapper[4663]: E1211 10:33:52.182746 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ErrImagePull: \"rpc error: code = DeadlineExceeded desc = initializing source docker://38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0: pinging container registry 38.129.56.99:5001: Get \\\"http://38.129.56.99:5001/v2/\\\": dial tcp 38.129.56.99:5001: i/o timeout\"" pod="openstack-operators/openstack-operator-index-4xjs4" podUID="df732853-d2fb-4c73-a747-6c0a80a3c050" Dec 11 10:33:52 crc kubenswrapper[4663]: I1211 10:33:52.218985 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/22983e2d-2e94-4539-b09d-655cfc7a5097-catalog-content\") pod \"22983e2d-2e94-4539-b09d-655cfc7a5097\" (UID: \"22983e2d-2e94-4539-b09d-655cfc7a5097\") " Dec 11 10:33:52 crc kubenswrapper[4663]: I1211 10:33:52.219032 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jvqpc\" (UniqueName: \"kubernetes.io/projected/22983e2d-2e94-4539-b09d-655cfc7a5097-kube-api-access-jvqpc\") pod \"22983e2d-2e94-4539-b09d-655cfc7a5097\" (UID: \"22983e2d-2e94-4539-b09d-655cfc7a5097\") " Dec 11 10:33:52 crc kubenswrapper[4663]: I1211 10:33:52.219155 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/22983e2d-2e94-4539-b09d-655cfc7a5097-utilities\") pod \"22983e2d-2e94-4539-b09d-655cfc7a5097\" (UID: \"22983e2d-2e94-4539-b09d-655cfc7a5097\") " Dec 11 10:33:52 crc kubenswrapper[4663]: I1211 10:33:52.219690 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/22983e2d-2e94-4539-b09d-655cfc7a5097-utilities" (OuterVolumeSpecName: "utilities") pod "22983e2d-2e94-4539-b09d-655cfc7a5097" (UID: "22983e2d-2e94-4539-b09d-655cfc7a5097"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:33:52 crc kubenswrapper[4663]: I1211 10:33:52.223306 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22983e2d-2e94-4539-b09d-655cfc7a5097-kube-api-access-jvqpc" (OuterVolumeSpecName: "kube-api-access-jvqpc") pod "22983e2d-2e94-4539-b09d-655cfc7a5097" (UID: "22983e2d-2e94-4539-b09d-655cfc7a5097"). InnerVolumeSpecName "kube-api-access-jvqpc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:33:52 crc kubenswrapper[4663]: I1211 10:33:52.233267 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/22983e2d-2e94-4539-b09d-655cfc7a5097-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "22983e2d-2e94-4539-b09d-655cfc7a5097" (UID: "22983e2d-2e94-4539-b09d-655cfc7a5097"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:33:52 crc kubenswrapper[4663]: I1211 10:33:52.320218 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jvqpc\" (UniqueName: \"kubernetes.io/projected/22983e2d-2e94-4539-b09d-655cfc7a5097-kube-api-access-jvqpc\") on node \"crc\" DevicePath \"\"" Dec 11 10:33:52 crc kubenswrapper[4663]: I1211 10:33:52.320238 4663 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/22983e2d-2e94-4539-b09d-655cfc7a5097-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 10:33:52 crc kubenswrapper[4663]: I1211 10:33:52.320247 4663 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/22983e2d-2e94-4539-b09d-655cfc7a5097-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 10:33:52 crc kubenswrapper[4663]: I1211 10:33:52.385953 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-c92f6"] Dec 11 10:33:52 crc kubenswrapper[4663]: E1211 10:33:52.386156 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22983e2d-2e94-4539-b09d-655cfc7a5097" containerName="extract-content" Dec 11 10:33:52 crc kubenswrapper[4663]: I1211 10:33:52.386171 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="22983e2d-2e94-4539-b09d-655cfc7a5097" containerName="extract-content" Dec 11 10:33:52 crc kubenswrapper[4663]: E1211 10:33:52.386195 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22983e2d-2e94-4539-b09d-655cfc7a5097" containerName="registry-server" Dec 11 10:33:52 crc kubenswrapper[4663]: I1211 10:33:52.386201 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="22983e2d-2e94-4539-b09d-655cfc7a5097" containerName="registry-server" Dec 11 10:33:52 crc kubenswrapper[4663]: E1211 10:33:52.386212 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22983e2d-2e94-4539-b09d-655cfc7a5097" containerName="extract-utilities" Dec 11 10:33:52 crc kubenswrapper[4663]: I1211 10:33:52.386219 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="22983e2d-2e94-4539-b09d-655cfc7a5097" containerName="extract-utilities" Dec 11 10:33:52 crc kubenswrapper[4663]: I1211 10:33:52.386343 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="22983e2d-2e94-4539-b09d-655cfc7a5097" containerName="registry-server" Dec 11 10:33:52 crc kubenswrapper[4663]: I1211 10:33:52.387029 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-c92f6" Dec 11 10:33:52 crc kubenswrapper[4663]: I1211 10:33:52.412575 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-c92f6"] Dec 11 10:33:52 crc kubenswrapper[4663]: I1211 10:33:52.421598 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15171ea0-4465-4197-b101-d9d44b0b708d-catalog-content\") pod \"certified-operators-c92f6\" (UID: \"15171ea0-4465-4197-b101-d9d44b0b708d\") " pod="openshift-marketplace/certified-operators-c92f6" Dec 11 10:33:52 crc kubenswrapper[4663]: I1211 10:33:52.421654 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15171ea0-4465-4197-b101-d9d44b0b708d-utilities\") pod \"certified-operators-c92f6\" (UID: \"15171ea0-4465-4197-b101-d9d44b0b708d\") " pod="openshift-marketplace/certified-operators-c92f6" Dec 11 10:33:52 crc kubenswrapper[4663]: I1211 10:33:52.421706 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vbbgs\" (UniqueName: \"kubernetes.io/projected/15171ea0-4465-4197-b101-d9d44b0b708d-kube-api-access-vbbgs\") pod \"certified-operators-c92f6\" (UID: \"15171ea0-4465-4197-b101-d9d44b0b708d\") " pod="openshift-marketplace/certified-operators-c92f6" Dec 11 10:33:52 crc kubenswrapper[4663]: I1211 10:33:52.522452 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15171ea0-4465-4197-b101-d9d44b0b708d-utilities\") pod \"certified-operators-c92f6\" (UID: \"15171ea0-4465-4197-b101-d9d44b0b708d\") " pod="openshift-marketplace/certified-operators-c92f6" Dec 11 10:33:52 crc kubenswrapper[4663]: I1211 10:33:52.522521 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vbbgs\" (UniqueName: \"kubernetes.io/projected/15171ea0-4465-4197-b101-d9d44b0b708d-kube-api-access-vbbgs\") pod \"certified-operators-c92f6\" (UID: \"15171ea0-4465-4197-b101-d9d44b0b708d\") " pod="openshift-marketplace/certified-operators-c92f6" Dec 11 10:33:52 crc kubenswrapper[4663]: I1211 10:33:52.522909 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15171ea0-4465-4197-b101-d9d44b0b708d-utilities\") pod \"certified-operators-c92f6\" (UID: \"15171ea0-4465-4197-b101-d9d44b0b708d\") " pod="openshift-marketplace/certified-operators-c92f6" Dec 11 10:33:52 crc kubenswrapper[4663]: I1211 10:33:52.522992 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15171ea0-4465-4197-b101-d9d44b0b708d-catalog-content\") pod \"certified-operators-c92f6\" (UID: \"15171ea0-4465-4197-b101-d9d44b0b708d\") " pod="openshift-marketplace/certified-operators-c92f6" Dec 11 10:33:52 crc kubenswrapper[4663]: I1211 10:33:52.523246 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15171ea0-4465-4197-b101-d9d44b0b708d-catalog-content\") pod \"certified-operators-c92f6\" (UID: \"15171ea0-4465-4197-b101-d9d44b0b708d\") " pod="openshift-marketplace/certified-operators-c92f6" Dec 11 10:33:52 crc kubenswrapper[4663]: I1211 10:33:52.536487 4663 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-vbbgs\" (UniqueName: \"kubernetes.io/projected/15171ea0-4465-4197-b101-d9d44b0b708d-kube-api-access-vbbgs\") pod \"certified-operators-c92f6\" (UID: \"15171ea0-4465-4197-b101-d9d44b0b708d\") " pod="openshift-marketplace/certified-operators-c92f6" Dec 11 10:33:52 crc kubenswrapper[4663]: I1211 10:33:52.708518 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-c92f6" Dec 11 10:33:52 crc kubenswrapper[4663]: I1211 10:33:52.761270 4663 generic.go:334] "Generic (PLEG): container finished" podID="22983e2d-2e94-4539-b09d-655cfc7a5097" containerID="e4f83a2bb37aafa70b07ffa0b5f9d366c05ff4ca35ac95fb3947c63854d16195" exitCode=0 Dec 11 10:33:52 crc kubenswrapper[4663]: I1211 10:33:52.761455 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jll2t" event={"ID":"22983e2d-2e94-4539-b09d-655cfc7a5097","Type":"ContainerDied","Data":"e4f83a2bb37aafa70b07ffa0b5f9d366c05ff4ca35ac95fb3947c63854d16195"} Dec 11 10:33:52 crc kubenswrapper[4663]: I1211 10:33:52.761802 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jll2t" event={"ID":"22983e2d-2e94-4539-b09d-655cfc7a5097","Type":"ContainerDied","Data":"2d714e846d30a32b6264b8b755585ffd1ff52759bc7da73ba83d8bc6f6f3fb77"} Dec 11 10:33:52 crc kubenswrapper[4663]: I1211 10:33:52.761824 4663 scope.go:117] "RemoveContainer" containerID="e4f83a2bb37aafa70b07ffa0b5f9d366c05ff4ca35ac95fb3947c63854d16195" Dec 11 10:33:52 crc kubenswrapper[4663]: I1211 10:33:52.761545 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jll2t" Dec 11 10:33:52 crc kubenswrapper[4663]: I1211 10:33:52.790450 4663 scope.go:117] "RemoveContainer" containerID="cb18d1b27f6aa136edf2501b81934145c6c13ac598e0af2352176506b9fe7c70" Dec 11 10:33:52 crc kubenswrapper[4663]: I1211 10:33:52.811908 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-jll2t"] Dec 11 10:33:52 crc kubenswrapper[4663]: I1211 10:33:52.815829 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-jll2t"] Dec 11 10:33:52 crc kubenswrapper[4663]: I1211 10:33:52.826945 4663 scope.go:117] "RemoveContainer" containerID="d82834971a4d77827ff35679175897329085bc4dbcbdff54bc3444613c5ba279" Dec 11 10:33:52 crc kubenswrapper[4663]: I1211 10:33:52.861242 4663 scope.go:117] "RemoveContainer" containerID="e4f83a2bb37aafa70b07ffa0b5f9d366c05ff4ca35ac95fb3947c63854d16195" Dec 11 10:33:52 crc kubenswrapper[4663]: E1211 10:33:52.861773 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e4f83a2bb37aafa70b07ffa0b5f9d366c05ff4ca35ac95fb3947c63854d16195\": container with ID starting with e4f83a2bb37aafa70b07ffa0b5f9d366c05ff4ca35ac95fb3947c63854d16195 not found: ID does not exist" containerID="e4f83a2bb37aafa70b07ffa0b5f9d366c05ff4ca35ac95fb3947c63854d16195" Dec 11 10:33:52 crc kubenswrapper[4663]: I1211 10:33:52.861813 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4f83a2bb37aafa70b07ffa0b5f9d366c05ff4ca35ac95fb3947c63854d16195"} err="failed to get container status \"e4f83a2bb37aafa70b07ffa0b5f9d366c05ff4ca35ac95fb3947c63854d16195\": rpc error: code = NotFound desc = could not find container 
\"e4f83a2bb37aafa70b07ffa0b5f9d366c05ff4ca35ac95fb3947c63854d16195\": container with ID starting with e4f83a2bb37aafa70b07ffa0b5f9d366c05ff4ca35ac95fb3947c63854d16195 not found: ID does not exist" Dec 11 10:33:52 crc kubenswrapper[4663]: I1211 10:33:52.861836 4663 scope.go:117] "RemoveContainer" containerID="cb18d1b27f6aa136edf2501b81934145c6c13ac598e0af2352176506b9fe7c70" Dec 11 10:33:52 crc kubenswrapper[4663]: E1211 10:33:52.862129 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cb18d1b27f6aa136edf2501b81934145c6c13ac598e0af2352176506b9fe7c70\": container with ID starting with cb18d1b27f6aa136edf2501b81934145c6c13ac598e0af2352176506b9fe7c70 not found: ID does not exist" containerID="cb18d1b27f6aa136edf2501b81934145c6c13ac598e0af2352176506b9fe7c70" Dec 11 10:33:52 crc kubenswrapper[4663]: I1211 10:33:52.862150 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb18d1b27f6aa136edf2501b81934145c6c13ac598e0af2352176506b9fe7c70"} err="failed to get container status \"cb18d1b27f6aa136edf2501b81934145c6c13ac598e0af2352176506b9fe7c70\": rpc error: code = NotFound desc = could not find container \"cb18d1b27f6aa136edf2501b81934145c6c13ac598e0af2352176506b9fe7c70\": container with ID starting with cb18d1b27f6aa136edf2501b81934145c6c13ac598e0af2352176506b9fe7c70 not found: ID does not exist" Dec 11 10:33:52 crc kubenswrapper[4663]: I1211 10:33:52.862162 4663 scope.go:117] "RemoveContainer" containerID="d82834971a4d77827ff35679175897329085bc4dbcbdff54bc3444613c5ba279" Dec 11 10:33:52 crc kubenswrapper[4663]: E1211 10:33:52.862540 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d82834971a4d77827ff35679175897329085bc4dbcbdff54bc3444613c5ba279\": container with ID starting with d82834971a4d77827ff35679175897329085bc4dbcbdff54bc3444613c5ba279 not found: ID does not exist" containerID="d82834971a4d77827ff35679175897329085bc4dbcbdff54bc3444613c5ba279" Dec 11 10:33:52 crc kubenswrapper[4663]: I1211 10:33:52.862560 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d82834971a4d77827ff35679175897329085bc4dbcbdff54bc3444613c5ba279"} err="failed to get container status \"d82834971a4d77827ff35679175897329085bc4dbcbdff54bc3444613c5ba279\": rpc error: code = NotFound desc = could not find container \"d82834971a4d77827ff35679175897329085bc4dbcbdff54bc3444613c5ba279\": container with ID starting with d82834971a4d77827ff35679175897329085bc4dbcbdff54bc3444613c5ba279 not found: ID does not exist" Dec 11 10:33:53 crc kubenswrapper[4663]: I1211 10:33:53.079533 4663 util.go:48] "No ready sandbox for pod can be found. 
Dec 11 10:33:53 crc kubenswrapper[4663]: I1211 10:33:53.146978 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-c92f6"]
Dec 11 10:33:53 crc kubenswrapper[4663]: W1211 10:33:53.150749 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod15171ea0_4465_4197_b101_d9d44b0b708d.slice/crio-09ec192db7f5444728224e5ca182fbc0718e06c79a0029ce4ec760639f92251b WatchSource:0}: Error finding container 09ec192db7f5444728224e5ca182fbc0718e06c79a0029ce4ec760639f92251b: Status 404 returned error can't find the container with id 09ec192db7f5444728224e5ca182fbc0718e06c79a0029ce4ec760639f92251b
Dec 11 10:33:53 crc kubenswrapper[4663]: I1211 10:33:53.232010 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f8mkg\" (UniqueName: \"kubernetes.io/projected/df732853-d2fb-4c73-a747-6c0a80a3c050-kube-api-access-f8mkg\") pod \"df732853-d2fb-4c73-a747-6c0a80a3c050\" (UID: \"df732853-d2fb-4c73-a747-6c0a80a3c050\") "
Dec 11 10:33:53 crc kubenswrapper[4663]: I1211 10:33:53.235753 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df732853-d2fb-4c73-a747-6c0a80a3c050-kube-api-access-f8mkg" (OuterVolumeSpecName: "kube-api-access-f8mkg") pod "df732853-d2fb-4c73-a747-6c0a80a3c050" (UID: "df732853-d2fb-4c73-a747-6c0a80a3c050"). InnerVolumeSpecName "kube-api-access-f8mkg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 11 10:33:53 crc kubenswrapper[4663]: I1211 10:33:53.333123 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f8mkg\" (UniqueName: \"kubernetes.io/projected/df732853-d2fb-4c73-a747-6c0a80a3c050-kube-api-access-f8mkg\") on node \"crc\" DevicePath \"\""
Dec 11 10:33:53 crc kubenswrapper[4663]: I1211 10:33:53.384790 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-898zs"]
Dec 11 10:33:53 crc kubenswrapper[4663]: I1211 10:33:53.385835 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-898zs"
Dec 11 10:33:53 crc kubenswrapper[4663]: I1211 10:33:53.391829 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-898zs"]
Dec 11 10:33:53 crc kubenswrapper[4663]: I1211 10:33:53.433980 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9677ab30-b776-48c5-b0f7-434f44ed5677-catalog-content\") pod \"community-operators-898zs\" (UID: \"9677ab30-b776-48c5-b0f7-434f44ed5677\") " pod="openshift-marketplace/community-operators-898zs"
Dec 11 10:33:53 crc kubenswrapper[4663]: I1211 10:33:53.434023 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m78vq\" (UniqueName: \"kubernetes.io/projected/9677ab30-b776-48c5-b0f7-434f44ed5677-kube-api-access-m78vq\") pod \"community-operators-898zs\" (UID: \"9677ab30-b776-48c5-b0f7-434f44ed5677\") " pod="openshift-marketplace/community-operators-898zs"
Dec 11 10:33:53 crc kubenswrapper[4663]: I1211 10:33:53.434233 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9677ab30-b776-48c5-b0f7-434f44ed5677-utilities\") pod \"community-operators-898zs\" (UID: \"9677ab30-b776-48c5-b0f7-434f44ed5677\") " pod="openshift-marketplace/community-operators-898zs"
Dec 11 10:33:53 crc kubenswrapper[4663]: I1211 10:33:53.510953 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22983e2d-2e94-4539-b09d-655cfc7a5097" path="/var/lib/kubelet/pods/22983e2d-2e94-4539-b09d-655cfc7a5097/volumes"
Dec 11 10:33:53 crc kubenswrapper[4663]: I1211 10:33:53.535378 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9677ab30-b776-48c5-b0f7-434f44ed5677-utilities\") pod \"community-operators-898zs\" (UID: \"9677ab30-b776-48c5-b0f7-434f44ed5677\") " pod="openshift-marketplace/community-operators-898zs"
Dec 11 10:33:53 crc kubenswrapper[4663]: I1211 10:33:53.535470 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9677ab30-b776-48c5-b0f7-434f44ed5677-catalog-content\") pod \"community-operators-898zs\" (UID: \"9677ab30-b776-48c5-b0f7-434f44ed5677\") " pod="openshift-marketplace/community-operators-898zs"
Dec 11 10:33:53 crc kubenswrapper[4663]: I1211 10:33:53.535502 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m78vq\" (UniqueName: \"kubernetes.io/projected/9677ab30-b776-48c5-b0f7-434f44ed5677-kube-api-access-m78vq\") pod \"community-operators-898zs\" (UID: \"9677ab30-b776-48c5-b0f7-434f44ed5677\") " pod="openshift-marketplace/community-operators-898zs"
Dec 11 10:33:53 crc kubenswrapper[4663]: I1211 10:33:53.535822 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9677ab30-b776-48c5-b0f7-434f44ed5677-utilities\") pod \"community-operators-898zs\" (UID: \"9677ab30-b776-48c5-b0f7-434f44ed5677\") " pod="openshift-marketplace/community-operators-898zs"
Dec 11 10:33:53 crc kubenswrapper[4663]: I1211 10:33:53.535916 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9677ab30-b776-48c5-b0f7-434f44ed5677-catalog-content\") pod \"community-operators-898zs\" (UID: \"9677ab30-b776-48c5-b0f7-434f44ed5677\") " pod="openshift-marketplace/community-operators-898zs"
Dec 11 10:33:53 crc kubenswrapper[4663]: I1211 10:33:53.550245 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m78vq\" (UniqueName: \"kubernetes.io/projected/9677ab30-b776-48c5-b0f7-434f44ed5677-kube-api-access-m78vq\") pod \"community-operators-898zs\" (UID: \"9677ab30-b776-48c5-b0f7-434f44ed5677\") " pod="openshift-marketplace/community-operators-898zs"
Dec 11 10:33:53 crc kubenswrapper[4663]: I1211 10:33:53.696088 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-898zs"
Dec 11 10:33:53 crc kubenswrapper[4663]: I1211 10:33:53.770620 4663 generic.go:334] "Generic (PLEG): container finished" podID="15171ea0-4465-4197-b101-d9d44b0b708d" containerID="977b4f7683abc9460cbd2f58e96a53ecde9745443bc8cf701ac07e3f5c9237b8" exitCode=0
Dec 11 10:33:53 crc kubenswrapper[4663]: I1211 10:33:53.770700 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c92f6" event={"ID":"15171ea0-4465-4197-b101-d9d44b0b708d","Type":"ContainerDied","Data":"977b4f7683abc9460cbd2f58e96a53ecde9745443bc8cf701ac07e3f5c9237b8"}
Dec 11 10:33:53 crc kubenswrapper[4663]: I1211 10:33:53.770725 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c92f6" event={"ID":"15171ea0-4465-4197-b101-d9d44b0b708d","Type":"ContainerStarted","Data":"09ec192db7f5444728224e5ca182fbc0718e06c79a0029ce4ec760639f92251b"}
Dec 11 10:33:53 crc kubenswrapper[4663]: I1211 10:33:53.772219 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-4xjs4" event={"ID":"df732853-d2fb-4c73-a747-6c0a80a3c050","Type":"ContainerDied","Data":"dd031eb0630e54e1446417612c57b273786f2c1c71fbaefa0cf5635347637fdc"}
Dec 11 10:33:53 crc kubenswrapper[4663]: I1211 10:33:53.772276 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-4xjs4"
Dec 11 10:33:53 crc kubenswrapper[4663]: I1211 10:33:53.805315 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-4xjs4"]
Dec 11 10:33:53 crc kubenswrapper[4663]: I1211 10:33:53.809341 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-4xjs4"]
Dec 11 10:33:54 crc kubenswrapper[4663]: I1211 10:33:54.050329 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-898zs"]
Dec 11 10:33:54 crc kubenswrapper[4663]: W1211 10:33:54.054281 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9677ab30_b776_48c5_b0f7_434f44ed5677.slice/crio-2312a88c3d8fc8ed9dbe6e7b8a02707b3282262086fef5e81c4a4c2060db6aae WatchSource:0}: Error finding container 2312a88c3d8fc8ed9dbe6e7b8a02707b3282262086fef5e81c4a4c2060db6aae: Status 404 returned error can't find the container with id 2312a88c3d8fc8ed9dbe6e7b8a02707b3282262086fef5e81c4a4c2060db6aae
Dec 11 10:33:54 crc kubenswrapper[4663]: I1211 10:33:54.778754 4663 generic.go:334] "Generic (PLEG): container finished" podID="9677ab30-b776-48c5-b0f7-434f44ed5677" containerID="e5a39da0dd6270f52dfff528f85ae2e35ddf9c7564da632bcfe3baa7afb6aa2b" exitCode=0
Dec 11 10:33:54 crc kubenswrapper[4663]: I1211 10:33:54.778790 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-898zs" event={"ID":"9677ab30-b776-48c5-b0f7-434f44ed5677","Type":"ContainerDied","Data":"e5a39da0dd6270f52dfff528f85ae2e35ddf9c7564da632bcfe3baa7afb6aa2b"}
Dec 11 10:33:54 crc kubenswrapper[4663]: I1211 10:33:54.778813 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-898zs" event={"ID":"9677ab30-b776-48c5-b0f7-434f44ed5677","Type":"ContainerStarted","Data":"2312a88c3d8fc8ed9dbe6e7b8a02707b3282262086fef5e81c4a4c2060db6aae"}
Dec 11 10:33:55 crc kubenswrapper[4663]: I1211 10:33:55.518163 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="df732853-d2fb-4c73-a747-6c0a80a3c050" path="/var/lib/kubelet/pods/df732853-d2fb-4c73-a747-6c0a80a3c050/volumes"
Dec 11 10:33:55 crc kubenswrapper[4663]: I1211 10:33:55.785618 4663 generic.go:334] "Generic (PLEG): container finished" podID="15171ea0-4465-4197-b101-d9d44b0b708d" containerID="b709cf675f3519eac2711feb1e50e35509899b2900036e905ec1c03b54da8a75" exitCode=0
Dec 11 10:33:55 crc kubenswrapper[4663]: I1211 10:33:55.785691 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c92f6" event={"ID":"15171ea0-4465-4197-b101-d9d44b0b708d","Type":"ContainerDied","Data":"b709cf675f3519eac2711feb1e50e35509899b2900036e905ec1c03b54da8a75"}
Dec 11 10:33:55 crc kubenswrapper[4663]: I1211 10:33:55.788508 4663 generic.go:334] "Generic (PLEG): container finished" podID="9677ab30-b776-48c5-b0f7-434f44ed5677" containerID="fb2240237800a69dc1095fbcf1c4e8ad911fbca439a7eba783f4886235f39c3b" exitCode=0
Dec 11 10:33:55 crc kubenswrapper[4663]: I1211 10:33:55.788542 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-898zs" event={"ID":"9677ab30-b776-48c5-b0f7-434f44ed5677","Type":"ContainerDied","Data":"fb2240237800a69dc1095fbcf1c4e8ad911fbca439a7eba783f4886235f39c3b"}
Dec 11 10:33:56 crc kubenswrapper[4663]: E1211 10:33:56.179972 4663 log.go:32] "PullImage from image 
service failed" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0: pinging container registry 38.129.56.99:5001: Get \"http://38.129.56.99:5001/v2/\": dial tcp 38.129.56.99:5001: i/o timeout" image="38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0" Dec 11 10:33:56 crc kubenswrapper[4663]: E1211 10:33:56.180023 4663 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0: pinging container registry 38.129.56.99:5001: Get \"http://38.129.56.99:5001/v2/\": dial tcp 38.129.56.99:5001: i/o timeout" image="38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0" Dec 11 10:33:56 crc kubenswrapper[4663]: E1211 10:33:56.180133 4663 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:registry-server,Image:38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:grpc,HostPort:0,ContainerPort:50051,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-lrptx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:10,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-operator-index-s4rfx_openstack-operators(4fa7b43e-65e7-4338-a694-8a7042d9fbcb): ErrImagePull: rpc error: code = DeadlineExceeded desc = initializing source docker://38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0: pinging container registry 38.129.56.99:5001: Get 
\"http://38.129.56.99:5001/v2/\": dial tcp 38.129.56.99:5001: i/o timeout" logger="UnhandledError" Dec 11 10:33:56 crc kubenswrapper[4663]: E1211 10:33:56.181999 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ErrImagePull: \"rpc error: code = DeadlineExceeded desc = initializing source docker://38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0: pinging container registry 38.129.56.99:5001: Get \\\"http://38.129.56.99:5001/v2/\\\": dial tcp 38.129.56.99:5001: i/o timeout\"" pod="openstack-operators/openstack-operator-index-s4rfx" podUID="4fa7b43e-65e7-4338-a694-8a7042d9fbcb" Dec 11 10:33:56 crc kubenswrapper[4663]: I1211 10:33:56.800178 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c92f6" event={"ID":"15171ea0-4465-4197-b101-d9d44b0b708d","Type":"ContainerStarted","Data":"069c646411de357b79387cef3792849a67326380b6ff984a19f6b847e2c1a0d3"} Dec 11 10:33:56 crc kubenswrapper[4663]: I1211 10:33:56.803606 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-898zs" event={"ID":"9677ab30-b776-48c5-b0f7-434f44ed5677","Type":"ContainerStarted","Data":"05a85bebfabe6961df8e86792f0065851eb8d5cc62c962f62a53759d572e6331"} Dec 11 10:33:56 crc kubenswrapper[4663]: E1211 10:33:56.804339 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0\\\"\"" pod="openstack-operators/openstack-operator-index-s4rfx" podUID="4fa7b43e-65e7-4338-a694-8a7042d9fbcb" Dec 11 10:33:56 crc kubenswrapper[4663]: I1211 10:33:56.815881 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-c92f6" podStartSLOduration=2.243794972 podStartE2EDuration="4.815870409s" podCreationTimestamp="2025-12-11 10:33:52 +0000 UTC" firstStartedPulling="2025-12-11 10:33:53.771750418 +0000 UTC m=+964.589680202" lastFinishedPulling="2025-12-11 10:33:56.343825855 +0000 UTC m=+967.161755639" observedRunningTime="2025-12-11 10:33:56.814662518 +0000 UTC m=+967.632592291" watchObservedRunningTime="2025-12-11 10:33:56.815870409 +0000 UTC m=+967.633800193" Dec 11 10:33:56 crc kubenswrapper[4663]: I1211 10:33:56.841588 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-898zs" podStartSLOduration=2.003356462 podStartE2EDuration="3.841572746s" podCreationTimestamp="2025-12-11 10:33:53 +0000 UTC" firstStartedPulling="2025-12-11 10:33:54.781043729 +0000 UTC m=+965.598973513" lastFinishedPulling="2025-12-11 10:33:56.619260002 +0000 UTC m=+967.437189797" observedRunningTime="2025-12-11 10:33:56.837483188 +0000 UTC m=+967.655412971" watchObservedRunningTime="2025-12-11 10:33:56.841572746 +0000 UTC m=+967.659502530" Dec 11 10:34:02 crc kubenswrapper[4663]: I1211 10:34:02.709538 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-c92f6" Dec 11 10:34:02 crc kubenswrapper[4663]: I1211 10:34:02.710441 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-c92f6" Dec 11 10:34:02 crc kubenswrapper[4663]: I1211 10:34:02.738328 4663 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="started" pod="openshift-marketplace/certified-operators-c92f6" Dec 11 10:34:02 crc kubenswrapper[4663]: I1211 10:34:02.856608 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-c92f6" Dec 11 10:34:02 crc kubenswrapper[4663]: I1211 10:34:02.958717 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-c92f6"] Dec 11 10:34:03 crc kubenswrapper[4663]: I1211 10:34:03.696493 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-898zs" Dec 11 10:34:03 crc kubenswrapper[4663]: I1211 10:34:03.696530 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-898zs" Dec 11 10:34:03 crc kubenswrapper[4663]: I1211 10:34:03.723824 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-898zs" Dec 11 10:34:03 crc kubenswrapper[4663]: I1211 10:34:03.858529 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-898zs" Dec 11 10:34:04 crc kubenswrapper[4663]: I1211 10:34:04.837282 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-c92f6" podUID="15171ea0-4465-4197-b101-d9d44b0b708d" containerName="registry-server" containerID="cri-o://069c646411de357b79387cef3792849a67326380b6ff984a19f6b847e2c1a0d3" gracePeriod=2 Dec 11 10:34:05 crc kubenswrapper[4663]: I1211 10:34:05.163730 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-c92f6" Dec 11 10:34:05 crc kubenswrapper[4663]: I1211 10:34:05.272489 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15171ea0-4465-4197-b101-d9d44b0b708d-catalog-content\") pod \"15171ea0-4465-4197-b101-d9d44b0b708d\" (UID: \"15171ea0-4465-4197-b101-d9d44b0b708d\") " Dec 11 10:34:05 crc kubenswrapper[4663]: I1211 10:34:05.273116 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15171ea0-4465-4197-b101-d9d44b0b708d-utilities\") pod \"15171ea0-4465-4197-b101-d9d44b0b708d\" (UID: \"15171ea0-4465-4197-b101-d9d44b0b708d\") " Dec 11 10:34:05 crc kubenswrapper[4663]: I1211 10:34:05.273209 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vbbgs\" (UniqueName: \"kubernetes.io/projected/15171ea0-4465-4197-b101-d9d44b0b708d-kube-api-access-vbbgs\") pod \"15171ea0-4465-4197-b101-d9d44b0b708d\" (UID: \"15171ea0-4465-4197-b101-d9d44b0b708d\") " Dec 11 10:34:05 crc kubenswrapper[4663]: I1211 10:34:05.273756 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/15171ea0-4465-4197-b101-d9d44b0b708d-utilities" (OuterVolumeSpecName: "utilities") pod "15171ea0-4465-4197-b101-d9d44b0b708d" (UID: "15171ea0-4465-4197-b101-d9d44b0b708d"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:34:05 crc kubenswrapper[4663]: I1211 10:34:05.277323 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15171ea0-4465-4197-b101-d9d44b0b708d-kube-api-access-vbbgs" (OuterVolumeSpecName: "kube-api-access-vbbgs") pod "15171ea0-4465-4197-b101-d9d44b0b708d" (UID: "15171ea0-4465-4197-b101-d9d44b0b708d"). InnerVolumeSpecName "kube-api-access-vbbgs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:34:05 crc kubenswrapper[4663]: I1211 10:34:05.311701 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/15171ea0-4465-4197-b101-d9d44b0b708d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "15171ea0-4465-4197-b101-d9d44b0b708d" (UID: "15171ea0-4465-4197-b101-d9d44b0b708d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:34:05 crc kubenswrapper[4663]: I1211 10:34:05.357877 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-898zs"] Dec 11 10:34:05 crc kubenswrapper[4663]: I1211 10:34:05.374555 4663 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15171ea0-4465-4197-b101-d9d44b0b708d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 10:34:05 crc kubenswrapper[4663]: I1211 10:34:05.374579 4663 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15171ea0-4465-4197-b101-d9d44b0b708d-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 10:34:05 crc kubenswrapper[4663]: I1211 10:34:05.374590 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vbbgs\" (UniqueName: \"kubernetes.io/projected/15171ea0-4465-4197-b101-d9d44b0b708d-kube-api-access-vbbgs\") on node \"crc\" DevicePath \"\"" Dec 11 10:34:05 crc kubenswrapper[4663]: I1211 10:34:05.843568 4663 generic.go:334] "Generic (PLEG): container finished" podID="15171ea0-4465-4197-b101-d9d44b0b708d" containerID="069c646411de357b79387cef3792849a67326380b6ff984a19f6b847e2c1a0d3" exitCode=0 Dec 11 10:34:05 crc kubenswrapper[4663]: I1211 10:34:05.843614 4663 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-c92f6" Dec 11 10:34:05 crc kubenswrapper[4663]: I1211 10:34:05.843627 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c92f6" event={"ID":"15171ea0-4465-4197-b101-d9d44b0b708d","Type":"ContainerDied","Data":"069c646411de357b79387cef3792849a67326380b6ff984a19f6b847e2c1a0d3"} Dec 11 10:34:05 crc kubenswrapper[4663]: I1211 10:34:05.843889 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c92f6" event={"ID":"15171ea0-4465-4197-b101-d9d44b0b708d","Type":"ContainerDied","Data":"09ec192db7f5444728224e5ca182fbc0718e06c79a0029ce4ec760639f92251b"} Dec 11 10:34:05 crc kubenswrapper[4663]: I1211 10:34:05.843905 4663 scope.go:117] "RemoveContainer" containerID="069c646411de357b79387cef3792849a67326380b6ff984a19f6b847e2c1a0d3" Dec 11 10:34:05 crc kubenswrapper[4663]: I1211 10:34:05.844100 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-898zs" podUID="9677ab30-b776-48c5-b0f7-434f44ed5677" containerName="registry-server" containerID="cri-o://05a85bebfabe6961df8e86792f0065851eb8d5cc62c962f62a53759d572e6331" gracePeriod=2 Dec 11 10:34:05 crc kubenswrapper[4663]: I1211 10:34:05.857297 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-c92f6"] Dec 11 10:34:05 crc kubenswrapper[4663]: I1211 10:34:05.862201 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-c92f6"] Dec 11 10:34:05 crc kubenswrapper[4663]: I1211 10:34:05.862337 4663 scope.go:117] "RemoveContainer" containerID="b709cf675f3519eac2711feb1e50e35509899b2900036e905ec1c03b54da8a75" Dec 11 10:34:05 crc kubenswrapper[4663]: I1211 10:34:05.875386 4663 scope.go:117] "RemoveContainer" containerID="977b4f7683abc9460cbd2f58e96a53ecde9745443bc8cf701ac07e3f5c9237b8" Dec 11 10:34:05 crc kubenswrapper[4663]: I1211 10:34:05.972132 4663 scope.go:117] "RemoveContainer" containerID="069c646411de357b79387cef3792849a67326380b6ff984a19f6b847e2c1a0d3" Dec 11 10:34:05 crc kubenswrapper[4663]: E1211 10:34:05.972496 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"069c646411de357b79387cef3792849a67326380b6ff984a19f6b847e2c1a0d3\": container with ID starting with 069c646411de357b79387cef3792849a67326380b6ff984a19f6b847e2c1a0d3 not found: ID does not exist" containerID="069c646411de357b79387cef3792849a67326380b6ff984a19f6b847e2c1a0d3" Dec 11 10:34:05 crc kubenswrapper[4663]: I1211 10:34:05.972524 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"069c646411de357b79387cef3792849a67326380b6ff984a19f6b847e2c1a0d3"} err="failed to get container status \"069c646411de357b79387cef3792849a67326380b6ff984a19f6b847e2c1a0d3\": rpc error: code = NotFound desc = could not find container \"069c646411de357b79387cef3792849a67326380b6ff984a19f6b847e2c1a0d3\": container with ID starting with 069c646411de357b79387cef3792849a67326380b6ff984a19f6b847e2c1a0d3 not found: ID does not exist" Dec 11 10:34:05 crc kubenswrapper[4663]: I1211 10:34:05.972543 4663 scope.go:117] "RemoveContainer" containerID="b709cf675f3519eac2711feb1e50e35509899b2900036e905ec1c03b54da8a75" Dec 11 10:34:05 crc kubenswrapper[4663]: E1211 10:34:05.972858 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = 
could not find container \"b709cf675f3519eac2711feb1e50e35509899b2900036e905ec1c03b54da8a75\": container with ID starting with b709cf675f3519eac2711feb1e50e35509899b2900036e905ec1c03b54da8a75 not found: ID does not exist" containerID="b709cf675f3519eac2711feb1e50e35509899b2900036e905ec1c03b54da8a75" Dec 11 10:34:05 crc kubenswrapper[4663]: I1211 10:34:05.972880 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b709cf675f3519eac2711feb1e50e35509899b2900036e905ec1c03b54da8a75"} err="failed to get container status \"b709cf675f3519eac2711feb1e50e35509899b2900036e905ec1c03b54da8a75\": rpc error: code = NotFound desc = could not find container \"b709cf675f3519eac2711feb1e50e35509899b2900036e905ec1c03b54da8a75\": container with ID starting with b709cf675f3519eac2711feb1e50e35509899b2900036e905ec1c03b54da8a75 not found: ID does not exist" Dec 11 10:34:05 crc kubenswrapper[4663]: I1211 10:34:05.972893 4663 scope.go:117] "RemoveContainer" containerID="977b4f7683abc9460cbd2f58e96a53ecde9745443bc8cf701ac07e3f5c9237b8" Dec 11 10:34:05 crc kubenswrapper[4663]: E1211 10:34:05.973111 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"977b4f7683abc9460cbd2f58e96a53ecde9745443bc8cf701ac07e3f5c9237b8\": container with ID starting with 977b4f7683abc9460cbd2f58e96a53ecde9745443bc8cf701ac07e3f5c9237b8 not found: ID does not exist" containerID="977b4f7683abc9460cbd2f58e96a53ecde9745443bc8cf701ac07e3f5c9237b8" Dec 11 10:34:05 crc kubenswrapper[4663]: I1211 10:34:05.973131 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"977b4f7683abc9460cbd2f58e96a53ecde9745443bc8cf701ac07e3f5c9237b8"} err="failed to get container status \"977b4f7683abc9460cbd2f58e96a53ecde9745443bc8cf701ac07e3f5c9237b8\": rpc error: code = NotFound desc = could not find container \"977b4f7683abc9460cbd2f58e96a53ecde9745443bc8cf701ac07e3f5c9237b8\": container with ID starting with 977b4f7683abc9460cbd2f58e96a53ecde9745443bc8cf701ac07e3f5c9237b8 not found: ID does not exist" Dec 11 10:34:06 crc kubenswrapper[4663]: I1211 10:34:06.152705 4663 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-898zs" Dec 11 10:34:06 crc kubenswrapper[4663]: I1211 10:34:06.184281 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m78vq\" (UniqueName: \"kubernetes.io/projected/9677ab30-b776-48c5-b0f7-434f44ed5677-kube-api-access-m78vq\") pod \"9677ab30-b776-48c5-b0f7-434f44ed5677\" (UID: \"9677ab30-b776-48c5-b0f7-434f44ed5677\") " Dec 11 10:34:06 crc kubenswrapper[4663]: I1211 10:34:06.184322 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9677ab30-b776-48c5-b0f7-434f44ed5677-catalog-content\") pod \"9677ab30-b776-48c5-b0f7-434f44ed5677\" (UID: \"9677ab30-b776-48c5-b0f7-434f44ed5677\") " Dec 11 10:34:06 crc kubenswrapper[4663]: I1211 10:34:06.184417 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9677ab30-b776-48c5-b0f7-434f44ed5677-utilities\") pod \"9677ab30-b776-48c5-b0f7-434f44ed5677\" (UID: \"9677ab30-b776-48c5-b0f7-434f44ed5677\") " Dec 11 10:34:06 crc kubenswrapper[4663]: I1211 10:34:06.185230 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9677ab30-b776-48c5-b0f7-434f44ed5677-utilities" (OuterVolumeSpecName: "utilities") pod "9677ab30-b776-48c5-b0f7-434f44ed5677" (UID: "9677ab30-b776-48c5-b0f7-434f44ed5677"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:34:06 crc kubenswrapper[4663]: I1211 10:34:06.187891 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9677ab30-b776-48c5-b0f7-434f44ed5677-kube-api-access-m78vq" (OuterVolumeSpecName: "kube-api-access-m78vq") pod "9677ab30-b776-48c5-b0f7-434f44ed5677" (UID: "9677ab30-b776-48c5-b0f7-434f44ed5677"). InnerVolumeSpecName "kube-api-access-m78vq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:34:06 crc kubenswrapper[4663]: I1211 10:34:06.221811 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9677ab30-b776-48c5-b0f7-434f44ed5677-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9677ab30-b776-48c5-b0f7-434f44ed5677" (UID: "9677ab30-b776-48c5-b0f7-434f44ed5677"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:34:06 crc kubenswrapper[4663]: I1211 10:34:06.286337 4663 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9677ab30-b776-48c5-b0f7-434f44ed5677-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 10:34:06 crc kubenswrapper[4663]: I1211 10:34:06.286362 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m78vq\" (UniqueName: \"kubernetes.io/projected/9677ab30-b776-48c5-b0f7-434f44ed5677-kube-api-access-m78vq\") on node \"crc\" DevicePath \"\"" Dec 11 10:34:06 crc kubenswrapper[4663]: I1211 10:34:06.286373 4663 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9677ab30-b776-48c5-b0f7-434f44ed5677-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 10:34:06 crc kubenswrapper[4663]: I1211 10:34:06.849744 4663 generic.go:334] "Generic (PLEG): container finished" podID="9677ab30-b776-48c5-b0f7-434f44ed5677" containerID="05a85bebfabe6961df8e86792f0065851eb8d5cc62c962f62a53759d572e6331" exitCode=0 Dec 11 10:34:06 crc kubenswrapper[4663]: I1211 10:34:06.849777 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-898zs" event={"ID":"9677ab30-b776-48c5-b0f7-434f44ed5677","Type":"ContainerDied","Data":"05a85bebfabe6961df8e86792f0065851eb8d5cc62c962f62a53759d572e6331"} Dec 11 10:34:06 crc kubenswrapper[4663]: I1211 10:34:06.849786 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-898zs" Dec 11 10:34:06 crc kubenswrapper[4663]: I1211 10:34:06.849825 4663 scope.go:117] "RemoveContainer" containerID="05a85bebfabe6961df8e86792f0065851eb8d5cc62c962f62a53759d572e6331" Dec 11 10:34:06 crc kubenswrapper[4663]: I1211 10:34:06.849814 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-898zs" event={"ID":"9677ab30-b776-48c5-b0f7-434f44ed5677","Type":"ContainerDied","Data":"2312a88c3d8fc8ed9dbe6e7b8a02707b3282262086fef5e81c4a4c2060db6aae"} Dec 11 10:34:06 crc kubenswrapper[4663]: I1211 10:34:06.862876 4663 scope.go:117] "RemoveContainer" containerID="fb2240237800a69dc1095fbcf1c4e8ad911fbca439a7eba783f4886235f39c3b" Dec 11 10:34:06 crc kubenswrapper[4663]: I1211 10:34:06.870414 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-898zs"] Dec 11 10:34:06 crc kubenswrapper[4663]: I1211 10:34:06.873895 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-898zs"] Dec 11 10:34:06 crc kubenswrapper[4663]: I1211 10:34:06.891048 4663 scope.go:117] "RemoveContainer" containerID="e5a39da0dd6270f52dfff528f85ae2e35ddf9c7564da632bcfe3baa7afb6aa2b" Dec 11 10:34:06 crc kubenswrapper[4663]: I1211 10:34:06.901563 4663 scope.go:117] "RemoveContainer" containerID="05a85bebfabe6961df8e86792f0065851eb8d5cc62c962f62a53759d572e6331" Dec 11 10:34:06 crc kubenswrapper[4663]: E1211 10:34:06.901882 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"05a85bebfabe6961df8e86792f0065851eb8d5cc62c962f62a53759d572e6331\": container with ID starting with 05a85bebfabe6961df8e86792f0065851eb8d5cc62c962f62a53759d572e6331 not found: ID does not exist" containerID="05a85bebfabe6961df8e86792f0065851eb8d5cc62c962f62a53759d572e6331" Dec 11 10:34:06 crc kubenswrapper[4663]: I1211 10:34:06.901912 
4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"05a85bebfabe6961df8e86792f0065851eb8d5cc62c962f62a53759d572e6331"} err="failed to get container status \"05a85bebfabe6961df8e86792f0065851eb8d5cc62c962f62a53759d572e6331\": rpc error: code = NotFound desc = could not find container \"05a85bebfabe6961df8e86792f0065851eb8d5cc62c962f62a53759d572e6331\": container with ID starting with 05a85bebfabe6961df8e86792f0065851eb8d5cc62c962f62a53759d572e6331 not found: ID does not exist" Dec 11 10:34:06 crc kubenswrapper[4663]: I1211 10:34:06.901931 4663 scope.go:117] "RemoveContainer" containerID="fb2240237800a69dc1095fbcf1c4e8ad911fbca439a7eba783f4886235f39c3b" Dec 11 10:34:06 crc kubenswrapper[4663]: E1211 10:34:06.902164 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fb2240237800a69dc1095fbcf1c4e8ad911fbca439a7eba783f4886235f39c3b\": container with ID starting with fb2240237800a69dc1095fbcf1c4e8ad911fbca439a7eba783f4886235f39c3b not found: ID does not exist" containerID="fb2240237800a69dc1095fbcf1c4e8ad911fbca439a7eba783f4886235f39c3b" Dec 11 10:34:06 crc kubenswrapper[4663]: I1211 10:34:06.902211 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fb2240237800a69dc1095fbcf1c4e8ad911fbca439a7eba783f4886235f39c3b"} err="failed to get container status \"fb2240237800a69dc1095fbcf1c4e8ad911fbca439a7eba783f4886235f39c3b\": rpc error: code = NotFound desc = could not find container \"fb2240237800a69dc1095fbcf1c4e8ad911fbca439a7eba783f4886235f39c3b\": container with ID starting with fb2240237800a69dc1095fbcf1c4e8ad911fbca439a7eba783f4886235f39c3b not found: ID does not exist" Dec 11 10:34:06 crc kubenswrapper[4663]: I1211 10:34:06.902225 4663 scope.go:117] "RemoveContainer" containerID="e5a39da0dd6270f52dfff528f85ae2e35ddf9c7564da632bcfe3baa7afb6aa2b" Dec 11 10:34:06 crc kubenswrapper[4663]: E1211 10:34:06.902400 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e5a39da0dd6270f52dfff528f85ae2e35ddf9c7564da632bcfe3baa7afb6aa2b\": container with ID starting with e5a39da0dd6270f52dfff528f85ae2e35ddf9c7564da632bcfe3baa7afb6aa2b not found: ID does not exist" containerID="e5a39da0dd6270f52dfff528f85ae2e35ddf9c7564da632bcfe3baa7afb6aa2b" Dec 11 10:34:06 crc kubenswrapper[4663]: I1211 10:34:06.902422 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e5a39da0dd6270f52dfff528f85ae2e35ddf9c7564da632bcfe3baa7afb6aa2b"} err="failed to get container status \"e5a39da0dd6270f52dfff528f85ae2e35ddf9c7564da632bcfe3baa7afb6aa2b\": rpc error: code = NotFound desc = could not find container \"e5a39da0dd6270f52dfff528f85ae2e35ddf9c7564da632bcfe3baa7afb6aa2b\": container with ID starting with e5a39da0dd6270f52dfff528f85ae2e35ddf9c7564da632bcfe3baa7afb6aa2b not found: ID does not exist" Dec 11 10:34:07 crc kubenswrapper[4663]: I1211 10:34:07.509779 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="15171ea0-4465-4197-b101-d9d44b0b708d" path="/var/lib/kubelet/pods/15171ea0-4465-4197-b101-d9d44b0b708d/volumes" Dec 11 10:34:07 crc kubenswrapper[4663]: I1211 10:34:07.510458 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9677ab30-b776-48c5-b0f7-434f44ed5677" path="/var/lib/kubelet/pods/9677ab30-b776-48c5-b0f7-434f44ed5677/volumes" Dec 11 10:35:49 crc kubenswrapper[4663]: I1211 
10:35:49.270794 4663 patch_prober.go:28] interesting pod/machine-config-daemon-mq524 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 10:35:49 crc kubenswrapper[4663]: I1211 10:35:49.271677 4663 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 10:36:12 crc kubenswrapper[4663]: E1211 10:36:12.510509 4663 log.go:32] "PullImage from image service failed" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0: pinging container registry 38.129.56.99:5001: Get \"http://38.129.56.99:5001/v2/\": dial tcp 38.129.56.99:5001: i/o timeout" image="38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0" Dec 11 10:36:12 crc kubenswrapper[4663]: E1211 10:36:12.510838 4663 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0: pinging container registry 38.129.56.99:5001: Get \"http://38.129.56.99:5001/v2/\": dial tcp 38.129.56.99:5001: i/o timeout" image="38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0" Dec 11 10:36:12 crc kubenswrapper[4663]: E1211 10:36:12.510934 4663 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:registry-server,Image:38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:grpc,HostPort:0,ContainerPort:50051,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-lrptx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe 
-addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:10,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-operator-index-s4rfx_openstack-operators(4fa7b43e-65e7-4338-a694-8a7042d9fbcb): ErrImagePull: rpc error: code = DeadlineExceeded desc = initializing source docker://38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0: pinging container registry 38.129.56.99:5001: Get \"http://38.129.56.99:5001/v2/\": dial tcp 38.129.56.99:5001: i/o timeout" logger="UnhandledError" Dec 11 10:36:12 crc kubenswrapper[4663]: E1211 10:36:12.512089 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ErrImagePull: \"rpc error: code = DeadlineExceeded desc = initializing source docker://38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0: pinging container registry 38.129.56.99:5001: Get \\\"http://38.129.56.99:5001/v2/\\\": dial tcp 38.129.56.99:5001: i/o timeout\"" pod="openstack-operators/openstack-operator-index-s4rfx" podUID="4fa7b43e-65e7-4338-a694-8a7042d9fbcb" Dec 11 10:36:19 crc kubenswrapper[4663]: I1211 10:36:19.271543 4663 patch_prober.go:28] interesting pod/machine-config-daemon-mq524 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 10:36:19 crc kubenswrapper[4663]: I1211 10:36:19.271763 4663 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 10:36:24 crc kubenswrapper[4663]: E1211 10:36:24.505876 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0\\\"\"" pod="openstack-operators/openstack-operator-index-s4rfx" podUID="4fa7b43e-65e7-4338-a694-8a7042d9fbcb" Dec 11 10:36:49 crc kubenswrapper[4663]: I1211 10:36:49.271205 4663 patch_prober.go:28] interesting pod/machine-config-daemon-mq524 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 10:36:49 crc kubenswrapper[4663]: I1211 10:36:49.271554 4663 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 10:36:49 crc kubenswrapper[4663]: I1211 10:36:49.271590 4663 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mq524" Dec 11 10:36:49 crc kubenswrapper[4663]: I1211 10:36:49.272002 4663 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c43fdb9398afc1e364205f1d182147a1fb7e384c431a0fe48c53bca2dc7a3515"} pod="openshift-machine-config-operator/machine-config-daemon-mq524" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 11 10:36:49 crc kubenswrapper[4663]: I1211 10:36:49.272060 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13" containerName="machine-config-daemon" containerID="cri-o://c43fdb9398afc1e364205f1d182147a1fb7e384c431a0fe48c53bca2dc7a3515" gracePeriod=600 Dec 11 10:36:49 crc kubenswrapper[4663]: I1211 10:36:49.546704 4663 generic.go:334] "Generic (PLEG): container finished" podID="98941020-2764-490d-81db-9decd21aaa13" containerID="c43fdb9398afc1e364205f1d182147a1fb7e384c431a0fe48c53bca2dc7a3515" exitCode=0 Dec 11 10:36:49 crc kubenswrapper[4663]: I1211 10:36:49.546760 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" event={"ID":"98941020-2764-490d-81db-9decd21aaa13","Type":"ContainerDied","Data":"c43fdb9398afc1e364205f1d182147a1fb7e384c431a0fe48c53bca2dc7a3515"} Dec 11 10:36:49 crc kubenswrapper[4663]: I1211 10:36:49.546794 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" event={"ID":"98941020-2764-490d-81db-9decd21aaa13","Type":"ContainerStarted","Data":"cd76ab168790519b11ee4793a6417d5f29225db1eb3d022fc37318e8ea33bb64"} Dec 11 10:36:49 crc kubenswrapper[4663]: I1211 10:36:49.546810 4663 scope.go:117] "RemoveContainer" containerID="8c8254fc38982caf8a920e34b1f8132d5bd94723c9a1ecbeeb5eec5817ed54be" Dec 11 10:38:38 crc kubenswrapper[4663]: E1211 10:38:38.509497 4663 log.go:32] "PullImage from image service failed" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0: pinging container registry 38.129.56.99:5001: Get \"http://38.129.56.99:5001/v2/\": dial tcp 38.129.56.99:5001: i/o timeout" image="38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0" Dec 11 10:38:38 crc kubenswrapper[4663]: E1211 10:38:38.509843 4663 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0: pinging container registry 38.129.56.99:5001: Get 
\"http://38.129.56.99:5001/v2/\": dial tcp 38.129.56.99:5001: i/o timeout" image="38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0" Dec 11 10:38:38 crc kubenswrapper[4663]: E1211 10:38:38.509949 4663 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:registry-server,Image:38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:grpc,HostPort:0,ContainerPort:50051,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-lrptx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:10,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-operator-index-s4rfx_openstack-operators(4fa7b43e-65e7-4338-a694-8a7042d9fbcb): ErrImagePull: rpc error: code = DeadlineExceeded desc = initializing source docker://38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0: pinging container registry 38.129.56.99:5001: Get \"http://38.129.56.99:5001/v2/\": dial tcp 38.129.56.99:5001: i/o timeout" logger="UnhandledError" Dec 11 10:38:38 crc kubenswrapper[4663]: E1211 10:38:38.511107 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ErrImagePull: \"rpc error: code = DeadlineExceeded desc = initializing source docker://38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0: pinging container registry 38.129.56.99:5001: Get \\\"http://38.129.56.99:5001/v2/\\\": dial tcp 38.129.56.99:5001: i/o timeout\"" pod="openstack-operators/openstack-operator-index-s4rfx" podUID="4fa7b43e-65e7-4338-a694-8a7042d9fbcb" Dec 11 10:38:49 crc kubenswrapper[4663]: I1211 10:38:49.271573 4663 patch_prober.go:28] 
interesting pod/machine-config-daemon-mq524 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 10:38:49 crc kubenswrapper[4663]: I1211 10:38:49.271943 4663 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 10:38:53 crc kubenswrapper[4663]: E1211 10:38:53.506403 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0\\\"\"" pod="openstack-operators/openstack-operator-index-s4rfx" podUID="4fa7b43e-65e7-4338-a694-8a7042d9fbcb" Dec 11 10:39:04 crc kubenswrapper[4663]: E1211 10:39:04.505650 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0\\\"\"" pod="openstack-operators/openstack-operator-index-s4rfx" podUID="4fa7b43e-65e7-4338-a694-8a7042d9fbcb" Dec 11 10:39:19 crc kubenswrapper[4663]: I1211 10:39:19.271392 4663 patch_prober.go:28] interesting pod/machine-config-daemon-mq524 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 10:39:19 crc kubenswrapper[4663]: I1211 10:39:19.272311 4663 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 10:39:19 crc kubenswrapper[4663]: I1211 10:39:19.508467 4663 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 11 10:39:49 crc kubenswrapper[4663]: I1211 10:39:49.271045 4663 patch_prober.go:28] interesting pod/machine-config-daemon-mq524 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 10:39:49 crc kubenswrapper[4663]: I1211 10:39:49.271468 4663 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 10:39:49 crc kubenswrapper[4663]: I1211 10:39:49.271507 4663 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mq524" Dec 11 10:39:49 crc kubenswrapper[4663]: I1211 10:39:49.271927 4663 kuberuntime_manager.go:1027] "Message for Container of pod" 
containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"cd76ab168790519b11ee4793a6417d5f29225db1eb3d022fc37318e8ea33bb64"} pod="openshift-machine-config-operator/machine-config-daemon-mq524" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 11 10:39:49 crc kubenswrapper[4663]: I1211 10:39:49.271973 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13" containerName="machine-config-daemon" containerID="cri-o://cd76ab168790519b11ee4793a6417d5f29225db1eb3d022fc37318e8ea33bb64" gracePeriod=600 Dec 11 10:39:50 crc kubenswrapper[4663]: I1211 10:39:50.326440 4663 generic.go:334] "Generic (PLEG): container finished" podID="98941020-2764-490d-81db-9decd21aaa13" containerID="cd76ab168790519b11ee4793a6417d5f29225db1eb3d022fc37318e8ea33bb64" exitCode=0 Dec 11 10:39:50 crc kubenswrapper[4663]: I1211 10:39:50.326520 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" event={"ID":"98941020-2764-490d-81db-9decd21aaa13","Type":"ContainerDied","Data":"cd76ab168790519b11ee4793a6417d5f29225db1eb3d022fc37318e8ea33bb64"} Dec 11 10:39:50 crc kubenswrapper[4663]: I1211 10:39:50.326673 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" event={"ID":"98941020-2764-490d-81db-9decd21aaa13","Type":"ContainerStarted","Data":"abeb3842eaaa5afa0cefde23a6fdc88c89d284dce3dd5516d78795f1625b7907"} Dec 11 10:39:50 crc kubenswrapper[4663]: I1211 10:39:50.326694 4663 scope.go:117] "RemoveContainer" containerID="c43fdb9398afc1e364205f1d182147a1fb7e384c431a0fe48c53bca2dc7a3515" Dec 11 10:41:19 crc kubenswrapper[4663]: E1211 10:41:19.511343 4663 log.go:32] "PullImage from image service failed" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0: pinging container registry 38.129.56.99:5001: Get \"http://38.129.56.99:5001/v2/\": dial tcp 38.129.56.99:5001: i/o timeout" image="38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0" Dec 11 10:41:19 crc kubenswrapper[4663]: E1211 10:41:19.511700 4663 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0: pinging container registry 38.129.56.99:5001: Get \"http://38.129.56.99:5001/v2/\": dial tcp 38.129.56.99:5001: i/o timeout" image="38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0" Dec 11 10:41:19 crc kubenswrapper[4663]: E1211 10:41:19.511803 4663 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:registry-server,Image:38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:grpc,HostPort:0,ContainerPort:50051,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-lrptx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:10,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-operator-index-s4rfx_openstack-operators(4fa7b43e-65e7-4338-a694-8a7042d9fbcb): ErrImagePull: rpc error: code = DeadlineExceeded desc = initializing source docker://38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0: pinging container registry 38.129.56.99:5001: Get \"http://38.129.56.99:5001/v2/\": dial tcp 38.129.56.99:5001: i/o timeout" logger="UnhandledError" Dec 11 10:41:19 crc kubenswrapper[4663]: E1211 10:41:19.512949 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ErrImagePull: \"rpc error: code = DeadlineExceeded desc = initializing source docker://38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0: pinging container registry 38.129.56.99:5001: Get \\\"http://38.129.56.99:5001/v2/\\\": dial tcp 38.129.56.99:5001: i/o timeout\"" pod="openstack-operators/openstack-operator-index-s4rfx" podUID="4fa7b43e-65e7-4338-a694-8a7042d9fbcb" Dec 11 10:41:34 crc kubenswrapper[4663]: E1211 10:41:34.506678 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0\\\"\"" pod="openstack-operators/openstack-operator-index-s4rfx" podUID="4fa7b43e-65e7-4338-a694-8a7042d9fbcb" Dec 11 10:41:47 crc kubenswrapper[4663]: E1211 10:41:47.506354 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0\\\"\"" 
pod="openstack-operators/openstack-operator-index-s4rfx" podUID="4fa7b43e-65e7-4338-a694-8a7042d9fbcb" Dec 11 10:41:49 crc kubenswrapper[4663]: I1211 10:41:49.271024 4663 patch_prober.go:28] interesting pod/machine-config-daemon-mq524 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 10:41:49 crc kubenswrapper[4663]: I1211 10:41:49.271087 4663 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 10:42:00 crc kubenswrapper[4663]: E1211 10:42:00.505954 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0\\\"\"" pod="openstack-operators/openstack-operator-index-s4rfx" podUID="4fa7b43e-65e7-4338-a694-8a7042d9fbcb" Dec 11 10:42:11 crc kubenswrapper[4663]: E1211 10:42:11.506101 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0\\\"\"" pod="openstack-operators/openstack-operator-index-s4rfx" podUID="4fa7b43e-65e7-4338-a694-8a7042d9fbcb" Dec 11 10:42:19 crc kubenswrapper[4663]: I1211 10:42:19.270619 4663 patch_prober.go:28] interesting pod/machine-config-daemon-mq524 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 10:42:19 crc kubenswrapper[4663]: I1211 10:42:19.270951 4663 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 10:42:24 crc kubenswrapper[4663]: E1211 10:42:24.506211 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0\\\"\"" pod="openstack-operators/openstack-operator-index-s4rfx" podUID="4fa7b43e-65e7-4338-a694-8a7042d9fbcb" Dec 11 10:42:35 crc kubenswrapper[4663]: E1211 10:42:35.505918 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0\\\"\"" pod="openstack-operators/openstack-operator-index-s4rfx" podUID="4fa7b43e-65e7-4338-a694-8a7042d9fbcb" Dec 11 10:42:49 crc kubenswrapper[4663]: I1211 10:42:49.271338 4663 patch_prober.go:28] interesting pod/machine-config-daemon-mq524 container/machine-config-daemon 
namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 10:42:49 crc kubenswrapper[4663]: I1211 10:42:49.271655 4663 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 10:42:49 crc kubenswrapper[4663]: I1211 10:42:49.271688 4663 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mq524" Dec 11 10:42:49 crc kubenswrapper[4663]: I1211 10:42:49.272053 4663 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"abeb3842eaaa5afa0cefde23a6fdc88c89d284dce3dd5516d78795f1625b7907"} pod="openshift-machine-config-operator/machine-config-daemon-mq524" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 11 10:42:49 crc kubenswrapper[4663]: I1211 10:42:49.272101 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13" containerName="machine-config-daemon" containerID="cri-o://abeb3842eaaa5afa0cefde23a6fdc88c89d284dce3dd5516d78795f1625b7907" gracePeriod=600 Dec 11 10:42:49 crc kubenswrapper[4663]: E1211 10:42:49.384346 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mq524_openshift-machine-config-operator(98941020-2764-490d-81db-9decd21aaa13)\"" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13" Dec 11 10:42:50 crc kubenswrapper[4663]: I1211 10:42:50.088943 4663 generic.go:334] "Generic (PLEG): container finished" podID="98941020-2764-490d-81db-9decd21aaa13" containerID="abeb3842eaaa5afa0cefde23a6fdc88c89d284dce3dd5516d78795f1625b7907" exitCode=0 Dec 11 10:42:50 crc kubenswrapper[4663]: I1211 10:42:50.089022 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" event={"ID":"98941020-2764-490d-81db-9decd21aaa13","Type":"ContainerDied","Data":"abeb3842eaaa5afa0cefde23a6fdc88c89d284dce3dd5516d78795f1625b7907"} Dec 11 10:42:50 crc kubenswrapper[4663]: I1211 10:42:50.089196 4663 scope.go:117] "RemoveContainer" containerID="cd76ab168790519b11ee4793a6417d5f29225db1eb3d022fc37318e8ea33bb64" Dec 11 10:42:50 crc kubenswrapper[4663]: I1211 10:42:50.089589 4663 scope.go:117] "RemoveContainer" containerID="abeb3842eaaa5afa0cefde23a6fdc88c89d284dce3dd5516d78795f1625b7907" Dec 11 10:42:50 crc kubenswrapper[4663]: E1211 10:42:50.089800 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mq524_openshift-machine-config-operator(98941020-2764-490d-81db-9decd21aaa13)\"" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13" 
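
The entries above repeat one failure pattern: every pull of 38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0 fails with "dial tcp 38.129.56.99:5001: i/o timeout" while pinging the registry's /v2/ endpoint, so the kubelet alternates between ErrImagePull and ImagePullBackOff for openstack-operator-index-s4rfx. A minimal Go sketch of the same reachability check follows, useful for confirming from the node whether the registry answers at all; the registry address is taken from the log, while the 10-second client timeout is an assumption here, not the kubelet's own pull deadline.

// registrycheck.go -- illustrative diagnostic, not part of this log: reproduce
// the OCI registry "ping" that the failed image pulls report, a plain GET
// against the /v2/ endpoint of 38.129.56.99:5001.
package main

import (
	"fmt"
	"net/http"
	"time"
)

func main() {
	// 10s timeout is an assumed value for illustration.
	client := &http.Client{Timeout: 10 * time.Second}

	// Same URL the logged failures name: GET "http://38.129.56.99:5001/v2/"
	resp, err := client.Get("http://38.129.56.99:5001/v2/")
	if err != nil {
		// Matches the logged failure mode:
		// dial tcp 38.129.56.99:5001: i/o timeout
		fmt.Println("registry unreachable:", err)
		return
	}
	defer resp.Body.Close()

	// Any HTTP response (200 or 401) means the /v2/ endpoint is up.
	fmt.Println("registry reachable, status:", resp.Status)
}

If this check times out the same way, the pod will keep cycling through back-off no matter how often the kubelet retries; the problem is network reachability to the registry, not the image reference or the pod spec.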
Dec 11 10:43:02 crc kubenswrapper[4663]: I1211 10:43:02.504507 4663 scope.go:117] "RemoveContainer" containerID="abeb3842eaaa5afa0cefde23a6fdc88c89d284dce3dd5516d78795f1625b7907"
Dec 11 10:43:02 crc kubenswrapper[4663]: E1211 10:43:02.505177 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mq524_openshift-machine-config-operator(98941020-2764-490d-81db-9decd21aaa13)\"" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13"
Dec 11 10:43:15 crc kubenswrapper[4663]: I1211 10:43:15.505098 4663 scope.go:117] "RemoveContainer" containerID="abeb3842eaaa5afa0cefde23a6fdc88c89d284dce3dd5516d78795f1625b7907"
Dec 11 10:43:15 crc kubenswrapper[4663]: E1211 10:43:15.505776 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mq524_openshift-machine-config-operator(98941020-2764-490d-81db-9decd21aaa13)\"" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13"
Dec 11 10:43:28 crc kubenswrapper[4663]: I1211 10:43:28.504093 4663 scope.go:117] "RemoveContainer" containerID="abeb3842eaaa5afa0cefde23a6fdc88c89d284dce3dd5516d78795f1625b7907"
Dec 11 10:43:28 crc kubenswrapper[4663]: E1211 10:43:28.504706 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mq524_openshift-machine-config-operator(98941020-2764-490d-81db-9decd21aaa13)\"" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13"
Dec 11 10:43:42 crc kubenswrapper[4663]: I1211 10:43:42.504166 4663 scope.go:117] "RemoveContainer" containerID="abeb3842eaaa5afa0cefde23a6fdc88c89d284dce3dd5516d78795f1625b7907"
Dec 11 10:43:42 crc kubenswrapper[4663]: E1211 10:43:42.504748 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mq524_openshift-machine-config-operator(98941020-2764-490d-81db-9decd21aaa13)\"" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13"
Dec 11 10:43:56 crc kubenswrapper[4663]: I1211 10:43:56.504768 4663 scope.go:117] "RemoveContainer" containerID="abeb3842eaaa5afa0cefde23a6fdc88c89d284dce3dd5516d78795f1625b7907"
Dec 11 10:43:56 crc kubenswrapper[4663]: E1211 10:43:56.505346 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mq524_openshift-machine-config-operator(98941020-2764-490d-81db-9decd21aaa13)\"" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13"
Dec 11 10:44:09 crc kubenswrapper[4663]: I1211 10:44:09.507446 4663 scope.go:117] "RemoveContainer" containerID="abeb3842eaaa5afa0cefde23a6fdc88c89d284dce3dd5516d78795f1625b7907"
Dec 11 10:44:09 crc kubenswrapper[4663]: E1211 10:44:09.507942 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mq524_openshift-machine-config-operator(98941020-2764-490d-81db-9decd21aaa13)\"" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13"
Dec 11 10:44:09 crc kubenswrapper[4663]: I1211 10:44:09.765665 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-mx4sh"]
Dec 11 10:44:09 crc kubenswrapper[4663]: E1211 10:44:09.765838 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15171ea0-4465-4197-b101-d9d44b0b708d" containerName="extract-content"
Dec 11 10:44:09 crc kubenswrapper[4663]: I1211 10:44:09.765859 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="15171ea0-4465-4197-b101-d9d44b0b708d" containerName="extract-content"
Dec 11 10:44:09 crc kubenswrapper[4663]: E1211 10:44:09.765876 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15171ea0-4465-4197-b101-d9d44b0b708d" containerName="extract-utilities"
Dec 11 10:44:09 crc kubenswrapper[4663]: I1211 10:44:09.765882 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="15171ea0-4465-4197-b101-d9d44b0b708d" containerName="extract-utilities"
Dec 11 10:44:09 crc kubenswrapper[4663]: E1211 10:44:09.765890 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15171ea0-4465-4197-b101-d9d44b0b708d" containerName="registry-server"
Dec 11 10:44:09 crc kubenswrapper[4663]: I1211 10:44:09.765896 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="15171ea0-4465-4197-b101-d9d44b0b708d" containerName="registry-server"
Dec 11 10:44:09 crc kubenswrapper[4663]: E1211 10:44:09.765925 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9677ab30-b776-48c5-b0f7-434f44ed5677" containerName="registry-server"
Dec 11 10:44:09 crc kubenswrapper[4663]: I1211 10:44:09.765931 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="9677ab30-b776-48c5-b0f7-434f44ed5677" containerName="registry-server"
Dec 11 10:44:09 crc kubenswrapper[4663]: E1211 10:44:09.765938 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9677ab30-b776-48c5-b0f7-434f44ed5677" containerName="extract-content"
Dec 11 10:44:09 crc kubenswrapper[4663]: I1211 10:44:09.765944 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="9677ab30-b776-48c5-b0f7-434f44ed5677" containerName="extract-content"
Dec 11 10:44:09 crc kubenswrapper[4663]: E1211 10:44:09.765952 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9677ab30-b776-48c5-b0f7-434f44ed5677" containerName="extract-utilities"
Dec 11 10:44:09 crc kubenswrapper[4663]: I1211 10:44:09.765957 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="9677ab30-b776-48c5-b0f7-434f44ed5677" containerName="extract-utilities"
Dec 11 10:44:09 crc kubenswrapper[4663]: I1211 10:44:09.766057 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="9677ab30-b776-48c5-b0f7-434f44ed5677" containerName="registry-server"
Dec 11 10:44:09 crc kubenswrapper[4663]: I1211 10:44:09.766069 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="15171ea0-4465-4197-b101-d9d44b0b708d" containerName="registry-server"
Dec 11 10:44:09 crc kubenswrapper[4663]: I1211 10:44:09.766756 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mx4sh"
Dec 11 10:44:09 crc kubenswrapper[4663]: I1211 10:44:09.772481 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mx4sh"]
Dec 11 10:44:09 crc kubenswrapper[4663]: I1211 10:44:09.836406 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/357793f9-0ea0-411c-8a40-7e6dfaf69d3f-catalog-content\") pod \"certified-operators-mx4sh\" (UID: \"357793f9-0ea0-411c-8a40-7e6dfaf69d3f\") " pod="openshift-marketplace/certified-operators-mx4sh"
Dec 11 10:44:09 crc kubenswrapper[4663]: I1211 10:44:09.836472 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bd5j7\" (UniqueName: \"kubernetes.io/projected/357793f9-0ea0-411c-8a40-7e6dfaf69d3f-kube-api-access-bd5j7\") pod \"certified-operators-mx4sh\" (UID: \"357793f9-0ea0-411c-8a40-7e6dfaf69d3f\") " pod="openshift-marketplace/certified-operators-mx4sh"
Dec 11 10:44:09 crc kubenswrapper[4663]: I1211 10:44:09.836493 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/357793f9-0ea0-411c-8a40-7e6dfaf69d3f-utilities\") pod \"certified-operators-mx4sh\" (UID: \"357793f9-0ea0-411c-8a40-7e6dfaf69d3f\") " pod="openshift-marketplace/certified-operators-mx4sh"
Dec 11 10:44:09 crc kubenswrapper[4663]: I1211 10:44:09.937926 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/357793f9-0ea0-411c-8a40-7e6dfaf69d3f-catalog-content\") pod \"certified-operators-mx4sh\" (UID: \"357793f9-0ea0-411c-8a40-7e6dfaf69d3f\") " pod="openshift-marketplace/certified-operators-mx4sh"
Dec 11 10:44:09 crc kubenswrapper[4663]: I1211 10:44:09.938011 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bd5j7\" (UniqueName: \"kubernetes.io/projected/357793f9-0ea0-411c-8a40-7e6dfaf69d3f-kube-api-access-bd5j7\") pod \"certified-operators-mx4sh\" (UID: \"357793f9-0ea0-411c-8a40-7e6dfaf69d3f\") " pod="openshift-marketplace/certified-operators-mx4sh"
Dec 11 10:44:09 crc kubenswrapper[4663]: I1211 10:44:09.938038 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/357793f9-0ea0-411c-8a40-7e6dfaf69d3f-utilities\") pod \"certified-operators-mx4sh\" (UID: \"357793f9-0ea0-411c-8a40-7e6dfaf69d3f\") " pod="openshift-marketplace/certified-operators-mx4sh"
Dec 11 10:44:09 crc kubenswrapper[4663]: I1211 10:44:09.938473 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/357793f9-0ea0-411c-8a40-7e6dfaf69d3f-utilities\") pod \"certified-operators-mx4sh\" (UID: \"357793f9-0ea0-411c-8a40-7e6dfaf69d3f\") " pod="openshift-marketplace/certified-operators-mx4sh"
Dec 11 10:44:09 crc kubenswrapper[4663]: I1211 10:44:09.938591 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/357793f9-0ea0-411c-8a40-7e6dfaf69d3f-catalog-content\") pod \"certified-operators-mx4sh\" (UID: \"357793f9-0ea0-411c-8a40-7e6dfaf69d3f\") " pod="openshift-marketplace/certified-operators-mx4sh"
Dec 11 10:44:09 crc kubenswrapper[4663]: I1211 10:44:09.953980 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bd5j7\" (UniqueName: \"kubernetes.io/projected/357793f9-0ea0-411c-8a40-7e6dfaf69d3f-kube-api-access-bd5j7\") pod \"certified-operators-mx4sh\" (UID: \"357793f9-0ea0-411c-8a40-7e6dfaf69d3f\") " pod="openshift-marketplace/certified-operators-mx4sh"
Dec 11 10:44:10 crc kubenswrapper[4663]: I1211 10:44:10.079587 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mx4sh"
Dec 11 10:44:10 crc kubenswrapper[4663]: I1211 10:44:10.282073 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mx4sh"]
Dec 11 10:44:10 crc kubenswrapper[4663]: I1211 10:44:10.436124 4663 generic.go:334] "Generic (PLEG): container finished" podID="357793f9-0ea0-411c-8a40-7e6dfaf69d3f" containerID="7a068c223385130721e630a95ede14ee13c094d58adaca14bf661bf37b952759" exitCode=0
Dec 11 10:44:10 crc kubenswrapper[4663]: I1211 10:44:10.436161 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mx4sh" event={"ID":"357793f9-0ea0-411c-8a40-7e6dfaf69d3f","Type":"ContainerDied","Data":"7a068c223385130721e630a95ede14ee13c094d58adaca14bf661bf37b952759"}
Dec 11 10:44:10 crc kubenswrapper[4663]: I1211 10:44:10.436198 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mx4sh" event={"ID":"357793f9-0ea0-411c-8a40-7e6dfaf69d3f","Type":"ContainerStarted","Data":"049bd7f6123865f163a56c3dd76bef84e30376941b3550d47f521238482f034b"}
Dec 11 10:44:11 crc kubenswrapper[4663]: I1211 10:44:11.442519 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mx4sh" event={"ID":"357793f9-0ea0-411c-8a40-7e6dfaf69d3f","Type":"ContainerStarted","Data":"b2f8432ae48fadd12a0874c384fa342199a8e1640c80ed52da204f1528844743"}
Dec 11 10:44:12 crc kubenswrapper[4663]: I1211 10:44:12.448712 4663 generic.go:334] "Generic (PLEG): container finished" podID="357793f9-0ea0-411c-8a40-7e6dfaf69d3f" containerID="b2f8432ae48fadd12a0874c384fa342199a8e1640c80ed52da204f1528844743" exitCode=0
Dec 11 10:44:12 crc kubenswrapper[4663]: I1211 10:44:12.448791 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mx4sh" event={"ID":"357793f9-0ea0-411c-8a40-7e6dfaf69d3f","Type":"ContainerDied","Data":"b2f8432ae48fadd12a0874c384fa342199a8e1640c80ed52da204f1528844743"}
Dec 11 10:44:13 crc kubenswrapper[4663]: I1211 10:44:13.454884 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mx4sh" event={"ID":"357793f9-0ea0-411c-8a40-7e6dfaf69d3f","Type":"ContainerStarted","Data":"bf22ecd7fd9df79faa0696f510de2277dd375c9e8cd7fb005f4dbcf3d924545b"}
Dec 11 10:44:13 crc kubenswrapper[4663]: I1211 10:44:13.466984 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-mx4sh" podStartSLOduration=1.8528587330000001 podStartE2EDuration="4.466970953s" podCreationTimestamp="2025-12-11 10:44:09 +0000 UTC" firstStartedPulling="2025-12-11 10:44:10.437478229 +0000 UTC m=+1581.255408013" lastFinishedPulling="2025-12-11 10:44:13.051590449 +0000 UTC m=+1583.869520233" observedRunningTime="2025-12-11 10:44:13.465993766 +0000 UTC m=+1584.283923550" watchObservedRunningTime="2025-12-11 10:44:13.466970953 +0000 UTC m=+1584.284900737"
Dec 11 10:44:20 crc kubenswrapper[4663]: I1211 10:44:20.080571 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-mx4sh"
Dec 11 10:44:20 crc kubenswrapper[4663]: I1211 10:44:20.081666 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-mx4sh"
Dec 11 10:44:20 crc kubenswrapper[4663]: I1211 10:44:20.107522 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-mx4sh"
Dec 11 10:44:20 crc kubenswrapper[4663]: I1211 10:44:20.510653 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-mx4sh"
Dec 11 10:44:20 crc kubenswrapper[4663]: I1211 10:44:20.541461 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mx4sh"]
Dec 11 10:44:22 crc kubenswrapper[4663]: I1211 10:44:22.492336 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-mx4sh" podUID="357793f9-0ea0-411c-8a40-7e6dfaf69d3f" containerName="registry-server" containerID="cri-o://bf22ecd7fd9df79faa0696f510de2277dd375c9e8cd7fb005f4dbcf3d924545b" gracePeriod=2
Dec 11 10:44:22 crc kubenswrapper[4663]: I1211 10:44:22.504023 4663 scope.go:117] "RemoveContainer" containerID="abeb3842eaaa5afa0cefde23a6fdc88c89d284dce3dd5516d78795f1625b7907"
Dec 11 10:44:22 crc kubenswrapper[4663]: E1211 10:44:22.504309 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mq524_openshift-machine-config-operator(98941020-2764-490d-81db-9decd21aaa13)\"" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13"
Dec 11 10:44:22 crc kubenswrapper[4663]: I1211 10:44:22.799550 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mx4sh"
Dec 11 10:44:22 crc kubenswrapper[4663]: I1211 10:44:22.884069 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bd5j7\" (UniqueName: \"kubernetes.io/projected/357793f9-0ea0-411c-8a40-7e6dfaf69d3f-kube-api-access-bd5j7\") pod \"357793f9-0ea0-411c-8a40-7e6dfaf69d3f\" (UID: \"357793f9-0ea0-411c-8a40-7e6dfaf69d3f\") "
Dec 11 10:44:22 crc kubenswrapper[4663]: I1211 10:44:22.884117 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/357793f9-0ea0-411c-8a40-7e6dfaf69d3f-utilities\") pod \"357793f9-0ea0-411c-8a40-7e6dfaf69d3f\" (UID: \"357793f9-0ea0-411c-8a40-7e6dfaf69d3f\") "
Dec 11 10:44:22 crc kubenswrapper[4663]: I1211 10:44:22.884242 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/357793f9-0ea0-411c-8a40-7e6dfaf69d3f-catalog-content\") pod \"357793f9-0ea0-411c-8a40-7e6dfaf69d3f\" (UID: \"357793f9-0ea0-411c-8a40-7e6dfaf69d3f\") "
Dec 11 10:44:22 crc kubenswrapper[4663]: I1211 10:44:22.884788 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/357793f9-0ea0-411c-8a40-7e6dfaf69d3f-utilities" (OuterVolumeSpecName: "utilities") pod "357793f9-0ea0-411c-8a40-7e6dfaf69d3f" (UID: "357793f9-0ea0-411c-8a40-7e6dfaf69d3f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 11 10:44:22 crc kubenswrapper[4663]: I1211 10:44:22.888012 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/357793f9-0ea0-411c-8a40-7e6dfaf69d3f-kube-api-access-bd5j7" (OuterVolumeSpecName: "kube-api-access-bd5j7") pod "357793f9-0ea0-411c-8a40-7e6dfaf69d3f" (UID: "357793f9-0ea0-411c-8a40-7e6dfaf69d3f"). InnerVolumeSpecName "kube-api-access-bd5j7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 11 10:44:22 crc kubenswrapper[4663]: I1211 10:44:22.919792 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/357793f9-0ea0-411c-8a40-7e6dfaf69d3f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "357793f9-0ea0-411c-8a40-7e6dfaf69d3f" (UID: "357793f9-0ea0-411c-8a40-7e6dfaf69d3f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 11 10:44:22 crc kubenswrapper[4663]: I1211 10:44:22.985719 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bd5j7\" (UniqueName: \"kubernetes.io/projected/357793f9-0ea0-411c-8a40-7e6dfaf69d3f-kube-api-access-bd5j7\") on node \"crc\" DevicePath \"\""
Dec 11 10:44:22 crc kubenswrapper[4663]: I1211 10:44:22.985761 4663 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/357793f9-0ea0-411c-8a40-7e6dfaf69d3f-utilities\") on node \"crc\" DevicePath \"\""
Dec 11 10:44:22 crc kubenswrapper[4663]: I1211 10:44:22.985771 4663 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/357793f9-0ea0-411c-8a40-7e6dfaf69d3f-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 11 10:44:23 crc kubenswrapper[4663]: I1211 10:44:23.498705 4663 generic.go:334] "Generic (PLEG): container finished" podID="357793f9-0ea0-411c-8a40-7e6dfaf69d3f" containerID="bf22ecd7fd9df79faa0696f510de2277dd375c9e8cd7fb005f4dbcf3d924545b" exitCode=0
Dec 11 10:44:23 crc kubenswrapper[4663]: I1211 10:44:23.498747 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mx4sh"
Dec 11 10:44:23 crc kubenswrapper[4663]: I1211 10:44:23.498748 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mx4sh" event={"ID":"357793f9-0ea0-411c-8a40-7e6dfaf69d3f","Type":"ContainerDied","Data":"bf22ecd7fd9df79faa0696f510de2277dd375c9e8cd7fb005f4dbcf3d924545b"}
Dec 11 10:44:23 crc kubenswrapper[4663]: I1211 10:44:23.498872 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mx4sh" event={"ID":"357793f9-0ea0-411c-8a40-7e6dfaf69d3f","Type":"ContainerDied","Data":"049bd7f6123865f163a56c3dd76bef84e30376941b3550d47f521238482f034b"}
Dec 11 10:44:23 crc kubenswrapper[4663]: I1211 10:44:23.498896 4663 scope.go:117] "RemoveContainer" containerID="bf22ecd7fd9df79faa0696f510de2277dd375c9e8cd7fb005f4dbcf3d924545b"
Dec 11 10:44:23 crc kubenswrapper[4663]: I1211 10:44:23.510706 4663 scope.go:117] "RemoveContainer" containerID="b2f8432ae48fadd12a0874c384fa342199a8e1640c80ed52da204f1528844743"
Dec 11 10:44:23 crc kubenswrapper[4663]: I1211 10:44:23.519639 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mx4sh"]
Dec 11 10:44:23 crc kubenswrapper[4663]: I1211 10:44:23.529417 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-mx4sh"]
Dec 11 10:44:23 crc kubenswrapper[4663]: I1211 10:44:23.531237 4663 scope.go:117] "RemoveContainer" containerID="7a068c223385130721e630a95ede14ee13c094d58adaca14bf661bf37b952759"
Dec 11 10:44:23 crc kubenswrapper[4663]: I1211 10:44:23.550609 4663 scope.go:117] "RemoveContainer" containerID="bf22ecd7fd9df79faa0696f510de2277dd375c9e8cd7fb005f4dbcf3d924545b"
Dec 11 10:44:23 crc kubenswrapper[4663]: E1211 10:44:23.550929 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bf22ecd7fd9df79faa0696f510de2277dd375c9e8cd7fb005f4dbcf3d924545b\": container with ID starting with bf22ecd7fd9df79faa0696f510de2277dd375c9e8cd7fb005f4dbcf3d924545b not found: ID does not exist" containerID="bf22ecd7fd9df79faa0696f510de2277dd375c9e8cd7fb005f4dbcf3d924545b"
Dec 11 10:44:23 crc kubenswrapper[4663]: I1211 10:44:23.550969 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf22ecd7fd9df79faa0696f510de2277dd375c9e8cd7fb005f4dbcf3d924545b"} err="failed to get container status \"bf22ecd7fd9df79faa0696f510de2277dd375c9e8cd7fb005f4dbcf3d924545b\": rpc error: code = NotFound desc = could not find container \"bf22ecd7fd9df79faa0696f510de2277dd375c9e8cd7fb005f4dbcf3d924545b\": container with ID starting with bf22ecd7fd9df79faa0696f510de2277dd375c9e8cd7fb005f4dbcf3d924545b not found: ID does not exist"
Dec 11 10:44:23 crc kubenswrapper[4663]: I1211 10:44:23.550992 4663 scope.go:117] "RemoveContainer" containerID="b2f8432ae48fadd12a0874c384fa342199a8e1640c80ed52da204f1528844743"
Dec 11 10:44:23 crc kubenswrapper[4663]: E1211 10:44:23.551350 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b2f8432ae48fadd12a0874c384fa342199a8e1640c80ed52da204f1528844743\": container with ID starting with b2f8432ae48fadd12a0874c384fa342199a8e1640c80ed52da204f1528844743 not found: ID does not exist" containerID="b2f8432ae48fadd12a0874c384fa342199a8e1640c80ed52da204f1528844743"
Dec 11 10:44:23 crc kubenswrapper[4663]: I1211 10:44:23.551372 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b2f8432ae48fadd12a0874c384fa342199a8e1640c80ed52da204f1528844743"} err="failed to get container status \"b2f8432ae48fadd12a0874c384fa342199a8e1640c80ed52da204f1528844743\": rpc error: code = NotFound desc = could not find container \"b2f8432ae48fadd12a0874c384fa342199a8e1640c80ed52da204f1528844743\": container with ID starting with b2f8432ae48fadd12a0874c384fa342199a8e1640c80ed52da204f1528844743 not found: ID does not exist"
Dec 11 10:44:23 crc kubenswrapper[4663]: I1211 10:44:23.551387 4663 scope.go:117] "RemoveContainer" containerID="7a068c223385130721e630a95ede14ee13c094d58adaca14bf661bf37b952759"
Dec 11 10:44:23 crc kubenswrapper[4663]: E1211 10:44:23.551620 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7a068c223385130721e630a95ede14ee13c094d58adaca14bf661bf37b952759\": container with ID starting with 7a068c223385130721e630a95ede14ee13c094d58adaca14bf661bf37b952759 not found: ID does not exist" containerID="7a068c223385130721e630a95ede14ee13c094d58adaca14bf661bf37b952759"
Dec 11 10:44:23 crc kubenswrapper[4663]: I1211 10:44:23.551648 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7a068c223385130721e630a95ede14ee13c094d58adaca14bf661bf37b952759"} err="failed to get container status \"7a068c223385130721e630a95ede14ee13c094d58adaca14bf661bf37b952759\": rpc error: code = NotFound desc = could not find container \"7a068c223385130721e630a95ede14ee13c094d58adaca14bf661bf37b952759\": container with ID starting with 7a068c223385130721e630a95ede14ee13c094d58adaca14bf661bf37b952759 not found: ID does not exist"
Dec 11 10:44:25 crc kubenswrapper[4663]: I1211 10:44:25.510039 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="357793f9-0ea0-411c-8a40-7e6dfaf69d3f" path="/var/lib/kubelet/pods/357793f9-0ea0-411c-8a40-7e6dfaf69d3f/volumes"
Dec 11 10:44:36 crc kubenswrapper[4663]: I1211 10:44:36.504625 4663 scope.go:117] "RemoveContainer" containerID="abeb3842eaaa5afa0cefde23a6fdc88c89d284dce3dd5516d78795f1625b7907"
Dec 11 10:44:36 crc kubenswrapper[4663]: E1211 10:44:36.505131 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mq524_openshift-machine-config-operator(98941020-2764-490d-81db-9decd21aaa13)\"" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13"
Dec 11 10:44:46 crc kubenswrapper[4663]: E1211 10:44:46.510403 4663 log.go:32] "PullImage from image service failed" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0: pinging container registry 38.129.56.99:5001: Get \"http://38.129.56.99:5001/v2/\": dial tcp 38.129.56.99:5001: i/o timeout" image="38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0"
Dec 11 10:44:46 crc kubenswrapper[4663]: E1211 10:44:46.510792 4663 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0: pinging container registry 38.129.56.99:5001: Get \"http://38.129.56.99:5001/v2/\": dial tcp 38.129.56.99:5001: i/o timeout" image="38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0"
Dec 11 10:44:46 crc kubenswrapper[4663]: E1211 10:44:46.510902 4663 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:registry-server,Image:38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:grpc,HostPort:0,ContainerPort:50051,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-lrptx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:10,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-operator-index-s4rfx_openstack-operators(4fa7b43e-65e7-4338-a694-8a7042d9fbcb): ErrImagePull: rpc error: code = DeadlineExceeded desc = initializing source docker://38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0: pinging container registry 38.129.56.99:5001: Get \"http://38.129.56.99:5001/v2/\": dial tcp 38.129.56.99:5001: i/o timeout" logger="UnhandledError"
Dec 11 10:44:46 crc kubenswrapper[4663]: E1211 10:44:46.512059 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ErrImagePull: \"rpc error: code = DeadlineExceeded desc = initializing source docker://38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0: pinging container registry 38.129.56.99:5001: Get \\\"http://38.129.56.99:5001/v2/\\\": dial tcp 38.129.56.99:5001: i/o timeout\"" pod="openstack-operators/openstack-operator-index-s4rfx" podUID="4fa7b43e-65e7-4338-a694-8a7042d9fbcb"
Dec 11 10:44:47 crc kubenswrapper[4663]: I1211 10:44:47.302458 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-5xt4k"]
Dec 11 10:44:47 crc kubenswrapper[4663]: E1211 10:44:47.302899 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="357793f9-0ea0-411c-8a40-7e6dfaf69d3f" containerName="extract-utilities"
Dec 11 10:44:47 crc kubenswrapper[4663]: I1211 10:44:47.302911 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="357793f9-0ea0-411c-8a40-7e6dfaf69d3f" containerName="extract-utilities"
Dec 11 10:44:47 crc kubenswrapper[4663]: E1211 10:44:47.302922 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="357793f9-0ea0-411c-8a40-7e6dfaf69d3f" containerName="registry-server"
Dec 11 10:44:47 crc kubenswrapper[4663]: I1211 10:44:47.302928 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="357793f9-0ea0-411c-8a40-7e6dfaf69d3f" containerName="registry-server"
Dec 11 10:44:47 crc kubenswrapper[4663]: E1211 10:44:47.302937 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="357793f9-0ea0-411c-8a40-7e6dfaf69d3f" containerName="extract-content"
Dec 11 10:44:47 crc kubenswrapper[4663]: I1211 10:44:47.302942 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="357793f9-0ea0-411c-8a40-7e6dfaf69d3f" containerName="extract-content"
Dec 11 10:44:47 crc kubenswrapper[4663]: I1211 10:44:47.303042 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="357793f9-0ea0-411c-8a40-7e6dfaf69d3f" containerName="registry-server"
Dec 11 10:44:47 crc kubenswrapper[4663]: I1211 10:44:47.303704 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5xt4k"
Dec 11 10:44:47 crc kubenswrapper[4663]: I1211 10:44:47.319890 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5xt4k"]
Dec 11 10:44:47 crc kubenswrapper[4663]: I1211 10:44:47.350594 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e6becf5f-8e49-43f3-b94e-c74a2fdcd667-catalog-content\") pod \"community-operators-5xt4k\" (UID: \"e6becf5f-8e49-43f3-b94e-c74a2fdcd667\") " pod="openshift-marketplace/community-operators-5xt4k"
Dec 11 10:44:47 crc kubenswrapper[4663]: I1211 10:44:47.350735 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4db5w\" (UniqueName: \"kubernetes.io/projected/e6becf5f-8e49-43f3-b94e-c74a2fdcd667-kube-api-access-4db5w\") pod \"community-operators-5xt4k\" (UID: \"e6becf5f-8e49-43f3-b94e-c74a2fdcd667\") " pod="openshift-marketplace/community-operators-5xt4k"
Dec 11 10:44:47 crc kubenswrapper[4663]: I1211 10:44:47.350757 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e6becf5f-8e49-43f3-b94e-c74a2fdcd667-utilities\") pod \"community-operators-5xt4k\" (UID: \"e6becf5f-8e49-43f3-b94e-c74a2fdcd667\") " pod="openshift-marketplace/community-operators-5xt4k"
Dec 11 10:44:47 crc kubenswrapper[4663]: I1211 10:44:47.452276 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4db5w\" (UniqueName: \"kubernetes.io/projected/e6becf5f-8e49-43f3-b94e-c74a2fdcd667-kube-api-access-4db5w\") pod \"community-operators-5xt4k\" (UID: \"e6becf5f-8e49-43f3-b94e-c74a2fdcd667\") " pod="openshift-marketplace/community-operators-5xt4k"
Dec 11 10:44:47 crc kubenswrapper[4663]: I1211 10:44:47.452504 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e6becf5f-8e49-43f3-b94e-c74a2fdcd667-utilities\") pod \"community-operators-5xt4k\" (UID: \"e6becf5f-8e49-43f3-b94e-c74a2fdcd667\") " pod="openshift-marketplace/community-operators-5xt4k"
Dec 11 10:44:47 crc kubenswrapper[4663]: I1211 10:44:47.452604 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e6becf5f-8e49-43f3-b94e-c74a2fdcd667-catalog-content\") pod \"community-operators-5xt4k\" (UID: \"e6becf5f-8e49-43f3-b94e-c74a2fdcd667\") " pod="openshift-marketplace/community-operators-5xt4k"
Dec 11 10:44:47 crc kubenswrapper[4663]: I1211 10:44:47.453068 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e6becf5f-8e49-43f3-b94e-c74a2fdcd667-utilities\") pod \"community-operators-5xt4k\" (UID: \"e6becf5f-8e49-43f3-b94e-c74a2fdcd667\") " pod="openshift-marketplace/community-operators-5xt4k"
Dec 11 10:44:47 crc kubenswrapper[4663]: I1211 10:44:47.453083 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e6becf5f-8e49-43f3-b94e-c74a2fdcd667-catalog-content\") pod \"community-operators-5xt4k\" (UID: \"e6becf5f-8e49-43f3-b94e-c74a2fdcd667\") " pod="openshift-marketplace/community-operators-5xt4k"
Dec 11 10:44:47 crc kubenswrapper[4663]: I1211 10:44:47.468202 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4db5w\" (UniqueName: \"kubernetes.io/projected/e6becf5f-8e49-43f3-b94e-c74a2fdcd667-kube-api-access-4db5w\") pod \"community-operators-5xt4k\" (UID: \"e6becf5f-8e49-43f3-b94e-c74a2fdcd667\") " pod="openshift-marketplace/community-operators-5xt4k"
Dec 11 10:44:47 crc kubenswrapper[4663]: I1211 10:44:47.504572 4663 scope.go:117] "RemoveContainer" containerID="abeb3842eaaa5afa0cefde23a6fdc88c89d284dce3dd5516d78795f1625b7907"
Dec 11 10:44:47 crc kubenswrapper[4663]: E1211 10:44:47.504810 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mq524_openshift-machine-config-operator(98941020-2764-490d-81db-9decd21aaa13)\"" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13"
Dec 11 10:44:47 crc kubenswrapper[4663]: I1211 10:44:47.628159 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5xt4k"
Dec 11 10:44:47 crc kubenswrapper[4663]: I1211 10:44:47.973617 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5xt4k"]
Dec 11 10:44:47 crc kubenswrapper[4663]: W1211 10:44:47.977780 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode6becf5f_8e49_43f3_b94e_c74a2fdcd667.slice/crio-cfead1681be57187daf74221d20bb929e8ab5beb0e8ef047efb922d382016f8f WatchSource:0}: Error finding container cfead1681be57187daf74221d20bb929e8ab5beb0e8ef047efb922d382016f8f: Status 404 returned error can't find the container with id cfead1681be57187daf74221d20bb929e8ab5beb0e8ef047efb922d382016f8f
Dec 11 10:44:48 crc kubenswrapper[4663]: I1211 10:44:48.615159 4663 generic.go:334] "Generic (PLEG): container finished" podID="e6becf5f-8e49-43f3-b94e-c74a2fdcd667" containerID="38fad4805bb2084644afc482d492e219b5b10782af6863d566ae002ad4a10fa6" exitCode=0
Dec 11 10:44:48 crc kubenswrapper[4663]: I1211 10:44:48.615397 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5xt4k" event={"ID":"e6becf5f-8e49-43f3-b94e-c74a2fdcd667","Type":"ContainerDied","Data":"38fad4805bb2084644afc482d492e219b5b10782af6863d566ae002ad4a10fa6"}
Dec 11 10:44:48 crc kubenswrapper[4663]: I1211 10:44:48.615420 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5xt4k" event={"ID":"e6becf5f-8e49-43f3-b94e-c74a2fdcd667","Type":"ContainerStarted","Data":"cfead1681be57187daf74221d20bb929e8ab5beb0e8ef047efb922d382016f8f"}
Dec 11 10:44:48 crc kubenswrapper[4663]: I1211 10:44:48.616794 4663 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 11 10:44:50 crc kubenswrapper[4663]: I1211 10:44:50.625145 4663 generic.go:334] "Generic (PLEG): container finished" podID="e6becf5f-8e49-43f3-b94e-c74a2fdcd667" containerID="8d5b69ed1694e57ccd329d09429c05dc9757c84371344b7844734add66000c34" exitCode=0
Dec 11 10:44:50 crc kubenswrapper[4663]: I1211 10:44:50.625223 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5xt4k" event={"ID":"e6becf5f-8e49-43f3-b94e-c74a2fdcd667","Type":"ContainerDied","Data":"8d5b69ed1694e57ccd329d09429c05dc9757c84371344b7844734add66000c34"}
Dec 11 10:44:51 crc kubenswrapper[4663]: I1211 10:44:51.632305 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5xt4k" event={"ID":"e6becf5f-8e49-43f3-b94e-c74a2fdcd667","Type":"ContainerStarted","Data":"35901096d41a4b045664b6eb5ae774ee098f8e21a3e563fe1ea235c5e3ea278c"}
Dec 11 10:44:51 crc kubenswrapper[4663]: I1211 10:44:51.647890 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-5xt4k" podStartSLOduration=1.98909338 podStartE2EDuration="4.647875075s" podCreationTimestamp="2025-12-11 10:44:47 +0000 UTC" firstStartedPulling="2025-12-11 10:44:48.616593676 +0000 UTC m=+1619.434523460" lastFinishedPulling="2025-12-11 10:44:51.275375371 +0000 UTC m=+1622.093305155" observedRunningTime="2025-12-11 10:44:51.643579739 +0000 UTC m=+1622.461509523" watchObservedRunningTime="2025-12-11 10:44:51.647875075 +0000 UTC m=+1622.465804859"
Dec 11 10:44:57 crc kubenswrapper[4663]: I1211 10:44:57.628708 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-5xt4k"
Dec 11 10:44:57 crc kubenswrapper[4663]: I1211 10:44:57.629479 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-5xt4k"
Dec 11 10:44:57 crc kubenswrapper[4663]: I1211 10:44:57.656973 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-5xt4k"
Dec 11 10:44:57 crc kubenswrapper[4663]: I1211 10:44:57.682693 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-5xt4k"
Dec 11 10:44:57 crc kubenswrapper[4663]: I1211 10:44:57.877752 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-5xt4k"]
Dec 11 10:44:58 crc kubenswrapper[4663]: I1211 10:44:58.504518 4663 scope.go:117] "RemoveContainer" containerID="abeb3842eaaa5afa0cefde23a6fdc88c89d284dce3dd5516d78795f1625b7907"
Dec 11 10:44:58 crc kubenswrapper[4663]: E1211 10:44:58.504692 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mq524_openshift-machine-config-operator(98941020-2764-490d-81db-9decd21aaa13)\"" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13"
Dec 11 10:44:58 crc kubenswrapper[4663]: E1211 10:44:58.506049 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0\\\"\"" pod="openstack-operators/openstack-operator-index-s4rfx" podUID="4fa7b43e-65e7-4338-a694-8a7042d9fbcb"
Dec 11 10:44:59 crc kubenswrapper[4663]: I1211 10:44:59.665319 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-5xt4k" podUID="e6becf5f-8e49-43f3-b94e-c74a2fdcd667" containerName="registry-server" containerID="cri-o://35901096d41a4b045664b6eb5ae774ee098f8e21a3e563fe1ea235c5e3ea278c" gracePeriod=2
Dec 11 10:44:59 crc kubenswrapper[4663]: I1211 10:44:59.960377 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5xt4k"
Dec 11 10:45:00 crc kubenswrapper[4663]: I1211 10:45:00.091558 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4db5w\" (UniqueName: \"kubernetes.io/projected/e6becf5f-8e49-43f3-b94e-c74a2fdcd667-kube-api-access-4db5w\") pod \"e6becf5f-8e49-43f3-b94e-c74a2fdcd667\" (UID: \"e6becf5f-8e49-43f3-b94e-c74a2fdcd667\") "
Dec 11 10:45:00 crc kubenswrapper[4663]: I1211 10:45:00.091608 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e6becf5f-8e49-43f3-b94e-c74a2fdcd667-catalog-content\") pod \"e6becf5f-8e49-43f3-b94e-c74a2fdcd667\" (UID: \"e6becf5f-8e49-43f3-b94e-c74a2fdcd667\") "
Dec 11 10:45:00 crc kubenswrapper[4663]: I1211 10:45:00.091665 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e6becf5f-8e49-43f3-b94e-c74a2fdcd667-utilities\") pod \"e6becf5f-8e49-43f3-b94e-c74a2fdcd667\" (UID: \"e6becf5f-8e49-43f3-b94e-c74a2fdcd667\") "
Dec 11 10:45:00 crc kubenswrapper[4663]: I1211 10:45:00.092421 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e6becf5f-8e49-43f3-b94e-c74a2fdcd667-utilities" (OuterVolumeSpecName: "utilities") pod "e6becf5f-8e49-43f3-b94e-c74a2fdcd667" (UID: "e6becf5f-8e49-43f3-b94e-c74a2fdcd667"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 11 10:45:00 crc kubenswrapper[4663]: I1211 10:45:00.096084 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e6becf5f-8e49-43f3-b94e-c74a2fdcd667-kube-api-access-4db5w" (OuterVolumeSpecName: "kube-api-access-4db5w") pod "e6becf5f-8e49-43f3-b94e-c74a2fdcd667" (UID: "e6becf5f-8e49-43f3-b94e-c74a2fdcd667"). InnerVolumeSpecName "kube-api-access-4db5w". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 11 10:45:00 crc kubenswrapper[4663]: I1211 10:45:00.128247 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e6becf5f-8e49-43f3-b94e-c74a2fdcd667-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e6becf5f-8e49-43f3-b94e-c74a2fdcd667" (UID: "e6becf5f-8e49-43f3-b94e-c74a2fdcd667"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 11 10:45:00 crc kubenswrapper[4663]: I1211 10:45:00.144373 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424165-f2jww"]
Dec 11 10:45:00 crc kubenswrapper[4663]: E1211 10:45:00.144760 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6becf5f-8e49-43f3-b94e-c74a2fdcd667" containerName="extract-utilities"
Dec 11 10:45:00 crc kubenswrapper[4663]: I1211 10:45:00.144864 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6becf5f-8e49-43f3-b94e-c74a2fdcd667" containerName="extract-utilities"
Dec 11 10:45:00 crc kubenswrapper[4663]: E1211 10:45:00.144950 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6becf5f-8e49-43f3-b94e-c74a2fdcd667" containerName="registry-server"
Dec 11 10:45:00 crc kubenswrapper[4663]: I1211 10:45:00.145000 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6becf5f-8e49-43f3-b94e-c74a2fdcd667" containerName="registry-server"
Dec 11 10:45:00 crc kubenswrapper[4663]: E1211 10:45:00.145105 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6becf5f-8e49-43f3-b94e-c74a2fdcd667" containerName="extract-content"
Dec 11 10:45:00 crc kubenswrapper[4663]: I1211 10:45:00.145153 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6becf5f-8e49-43f3-b94e-c74a2fdcd667" containerName="extract-content"
Dec 11 10:45:00 crc kubenswrapper[4663]: I1211 10:45:00.145406 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6becf5f-8e49-43f3-b94e-c74a2fdcd667" containerName="registry-server"
Dec 11 10:45:00 crc kubenswrapper[4663]: I1211 10:45:00.145915 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424165-f2jww"
Dec 11 10:45:00 crc kubenswrapper[4663]: I1211 10:45:00.148462 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Dec 11 10:45:00 crc kubenswrapper[4663]: I1211 10:45:00.148500 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Dec 11 10:45:00 crc kubenswrapper[4663]: I1211 10:45:00.195064 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tnm25\" (UniqueName: \"kubernetes.io/projected/82e810d3-8803-4f24-9d61-0958d6b25c8f-kube-api-access-tnm25\") pod \"collect-profiles-29424165-f2jww\" (UID: \"82e810d3-8803-4f24-9d61-0958d6b25c8f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424165-f2jww"
Dec 11 10:45:00 crc kubenswrapper[4663]: I1211 10:45:00.195125 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/82e810d3-8803-4f24-9d61-0958d6b25c8f-config-volume\") pod \"collect-profiles-29424165-f2jww\" (UID: \"82e810d3-8803-4f24-9d61-0958d6b25c8f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424165-f2jww"
Dec 11 10:45:00 crc kubenswrapper[4663]: I1211 10:45:00.197086 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424165-f2jww"]
Dec 11 10:45:00 crc kubenswrapper[4663]: I1211 10:45:00.197288 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/82e810d3-8803-4f24-9d61-0958d6b25c8f-secret-volume\") pod \"collect-profiles-29424165-f2jww\" (UID: \"82e810d3-8803-4f24-9d61-0958d6b25c8f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424165-f2jww"
Dec 11 10:45:00 crc kubenswrapper[4663]: I1211 10:45:00.197404 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4db5w\" (UniqueName: \"kubernetes.io/projected/e6becf5f-8e49-43f3-b94e-c74a2fdcd667-kube-api-access-4db5w\") on node \"crc\" DevicePath \"\""
Dec 11 10:45:00 crc kubenswrapper[4663]: I1211 10:45:00.197420 4663 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e6becf5f-8e49-43f3-b94e-c74a2fdcd667-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 11 10:45:00 crc kubenswrapper[4663]: I1211 10:45:00.197430 4663 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e6becf5f-8e49-43f3-b94e-c74a2fdcd667-utilities\") on node \"crc\" DevicePath \"\""
Dec 11 10:45:00 crc kubenswrapper[4663]: I1211 10:45:00.299069 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/82e810d3-8803-4f24-9d61-0958d6b25c8f-secret-volume\") pod \"collect-profiles-29424165-f2jww\" (UID: \"82e810d3-8803-4f24-9d61-0958d6b25c8f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424165-f2jww"
Dec 11 10:45:00 crc kubenswrapper[4663]: I1211 10:45:00.299555 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tnm25\" (UniqueName: \"kubernetes.io/projected/82e810d3-8803-4f24-9d61-0958d6b25c8f-kube-api-access-tnm25\") pod \"collect-profiles-29424165-f2jww\" (UID: \"82e810d3-8803-4f24-9d61-0958d6b25c8f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424165-f2jww"
Dec 11 10:45:00 crc kubenswrapper[4663]: I1211 10:45:00.299629 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/82e810d3-8803-4f24-9d61-0958d6b25c8f-config-volume\") pod \"collect-profiles-29424165-f2jww\" (UID: \"82e810d3-8803-4f24-9d61-0958d6b25c8f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424165-f2jww"
Dec 11 10:45:00 crc kubenswrapper[4663]: I1211 10:45:00.300277 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/82e810d3-8803-4f24-9d61-0958d6b25c8f-config-volume\") pod \"collect-profiles-29424165-f2jww\" (UID: \"82e810d3-8803-4f24-9d61-0958d6b25c8f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424165-f2jww"
Dec 11 10:45:00 crc kubenswrapper[4663]: I1211 10:45:00.301682 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/82e810d3-8803-4f24-9d61-0958d6b25c8f-secret-volume\") pod \"collect-profiles-29424165-f2jww\" (UID: \"82e810d3-8803-4f24-9d61-0958d6b25c8f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424165-f2jww"
Dec 11 10:45:00 crc kubenswrapper[4663]: I1211 10:45:00.311978 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tnm25\" (UniqueName: \"kubernetes.io/projected/82e810d3-8803-4f24-9d61-0958d6b25c8f-kube-api-access-tnm25\") pod \"collect-profiles-29424165-f2jww\" (UID: \"82e810d3-8803-4f24-9d61-0958d6b25c8f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424165-f2jww"
Dec 11 10:45:00 crc kubenswrapper[4663]: I1211 10:45:00.458717 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424165-f2jww"
Dec 11 10:45:00 crc kubenswrapper[4663]: I1211 10:45:00.671361 4663 generic.go:334] "Generic (PLEG): container finished" podID="e6becf5f-8e49-43f3-b94e-c74a2fdcd667" containerID="35901096d41a4b045664b6eb5ae774ee098f8e21a3e563fe1ea235c5e3ea278c" exitCode=0
Dec 11 10:45:00 crc kubenswrapper[4663]: I1211 10:45:00.671426 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5xt4k"
Dec 11 10:45:00 crc kubenswrapper[4663]: I1211 10:45:00.671445 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5xt4k" event={"ID":"e6becf5f-8e49-43f3-b94e-c74a2fdcd667","Type":"ContainerDied","Data":"35901096d41a4b045664b6eb5ae774ee098f8e21a3e563fe1ea235c5e3ea278c"}
Dec 11 10:45:00 crc kubenswrapper[4663]: I1211 10:45:00.671687 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5xt4k" event={"ID":"e6becf5f-8e49-43f3-b94e-c74a2fdcd667","Type":"ContainerDied","Data":"cfead1681be57187daf74221d20bb929e8ab5beb0e8ef047efb922d382016f8f"}
Dec 11 10:45:00 crc kubenswrapper[4663]: I1211 10:45:00.671705 4663 scope.go:117] "RemoveContainer" containerID="35901096d41a4b045664b6eb5ae774ee098f8e21a3e563fe1ea235c5e3ea278c"
Dec 11 10:45:00 crc kubenswrapper[4663]: I1211 10:45:00.685083 4663 scope.go:117] "RemoveContainer" containerID="8d5b69ed1694e57ccd329d09429c05dc9757c84371344b7844734add66000c34"
Dec 11 10:45:00 crc kubenswrapper[4663]: I1211 10:45:00.693195 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-5xt4k"]
Dec 11 10:45:00 crc kubenswrapper[4663]: I1211 10:45:00.697055 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-5xt4k"]
Dec 11 10:45:00 crc kubenswrapper[4663]: I1211 10:45:00.698784 4663 scope.go:117] "RemoveContainer" containerID="38fad4805bb2084644afc482d492e219b5b10782af6863d566ae002ad4a10fa6"
Dec 11 10:45:00 crc kubenswrapper[4663]: I1211 10:45:00.722899 4663 scope.go:117] "RemoveContainer" containerID="35901096d41a4b045664b6eb5ae774ee098f8e21a3e563fe1ea235c5e3ea278c"
Dec 11 10:45:00 crc kubenswrapper[4663]: E1211 10:45:00.723355 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"35901096d41a4b045664b6eb5ae774ee098f8e21a3e563fe1ea235c5e3ea278c\": container with ID starting with 35901096d41a4b045664b6eb5ae774ee098f8e21a3e563fe1ea235c5e3ea278c not found: ID does not exist" containerID="35901096d41a4b045664b6eb5ae774ee098f8e21a3e563fe1ea235c5e3ea278c"
Dec 11 10:45:00 crc kubenswrapper[4663]: I1211 10:45:00.723403 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"35901096d41a4b045664b6eb5ae774ee098f8e21a3e563fe1ea235c5e3ea278c"} err="failed to get container status \"35901096d41a4b045664b6eb5ae774ee098f8e21a3e563fe1ea235c5e3ea278c\": rpc error: code = NotFound desc = could not find container \"35901096d41a4b045664b6eb5ae774ee098f8e21a3e563fe1ea235c5e3ea278c\": container with ID starting with 35901096d41a4b045664b6eb5ae774ee098f8e21a3e563fe1ea235c5e3ea278c not found: ID does not exist"
Dec 11 10:45:00 crc kubenswrapper[4663]: I1211 10:45:00.723423 4663 scope.go:117] "RemoveContainer" containerID="8d5b69ed1694e57ccd329d09429c05dc9757c84371344b7844734add66000c34"
Dec 11 10:45:00 crc kubenswrapper[4663]: E1211 10:45:00.723677 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8d5b69ed1694e57ccd329d09429c05dc9757c84371344b7844734add66000c34\": container with ID starting with 8d5b69ed1694e57ccd329d09429c05dc9757c84371344b7844734add66000c34 not found: ID does not exist" containerID="8d5b69ed1694e57ccd329d09429c05dc9757c84371344b7844734add66000c34"
Dec 11 10:45:00 crc kubenswrapper[4663]: I1211 10:45:00.723714 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d5b69ed1694e57ccd329d09429c05dc9757c84371344b7844734add66000c34"} err="failed to get container status \"8d5b69ed1694e57ccd329d09429c05dc9757c84371344b7844734add66000c34\": rpc error: code = NotFound desc = could not find container \"8d5b69ed1694e57ccd329d09429c05dc9757c84371344b7844734add66000c34\": container with ID starting with 8d5b69ed1694e57ccd329d09429c05dc9757c84371344b7844734add66000c34 not found: ID does not exist"
Dec 11 10:45:00 crc kubenswrapper[4663]: I1211 10:45:00.723738 4663 scope.go:117] "RemoveContainer" containerID="38fad4805bb2084644afc482d492e219b5b10782af6863d566ae002ad4a10fa6"
Dec 11 10:45:00 crc kubenswrapper[4663]: E1211 10:45:00.724118 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"38fad4805bb2084644afc482d492e219b5b10782af6863d566ae002ad4a10fa6\": container with ID starting with 38fad4805bb2084644afc482d492e219b5b10782af6863d566ae002ad4a10fa6 not found: ID does not exist" containerID="38fad4805bb2084644afc482d492e219b5b10782af6863d566ae002ad4a10fa6"
Dec 11 10:45:00 crc kubenswrapper[4663]: I1211 10:45:00.724157 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38fad4805bb2084644afc482d492e219b5b10782af6863d566ae002ad4a10fa6"} err="failed to get container status \"38fad4805bb2084644afc482d492e219b5b10782af6863d566ae002ad4a10fa6\": rpc error: code = NotFound desc = could not find container \"38fad4805bb2084644afc482d492e219b5b10782af6863d566ae002ad4a10fa6\": container with ID starting with 38fad4805bb2084644afc482d492e219b5b10782af6863d566ae002ad4a10fa6 not found: ID does not exist"
Dec 11 10:45:00 crc kubenswrapper[4663]: I1211 10:45:00.790874 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424165-f2jww"]
Dec 11 10:45:01 crc kubenswrapper[4663]: I1211 10:45:01.510232 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e6becf5f-8e49-43f3-b94e-c74a2fdcd667" path="/var/lib/kubelet/pods/e6becf5f-8e49-43f3-b94e-c74a2fdcd667/volumes"
Dec 11 10:45:01 crc kubenswrapper[4663]: I1211 10:45:01.677742 4663 generic.go:334] "Generic (PLEG): container finished" podID="82e810d3-8803-4f24-9d61-0958d6b25c8f" containerID="b25ca7b729b0d658e6c8b8e34f4e135e3bdf5e17936e1ed29dfb3c0bc39c5427" exitCode=0
Dec 11 10:45:01 crc kubenswrapper[4663]: I1211 10:45:01.677781 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424165-f2jww" event={"ID":"82e810d3-8803-4f24-9d61-0958d6b25c8f","Type":"ContainerDied","Data":"b25ca7b729b0d658e6c8b8e34f4e135e3bdf5e17936e1ed29dfb3c0bc39c5427"}
Dec 11 10:45:01 crc kubenswrapper[4663]: I1211 10:45:01.678004 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424165-f2jww" event={"ID":"82e810d3-8803-4f24-9d61-0958d6b25c8f","Type":"ContainerStarted","Data":"6dcc27162c5fe14ba885b9de43751f3f487178a75cdbed9fbd752387f7bbbb34"}
Dec 11 10:45:02 crc kubenswrapper[4663]: I1211 10:45:02.853380 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424165-f2jww"
Dec 11 10:45:02 crc kubenswrapper[4663]: I1211 10:45:02.926606 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/82e810d3-8803-4f24-9d61-0958d6b25c8f-config-volume\") pod \"82e810d3-8803-4f24-9d61-0958d6b25c8f\" (UID: \"82e810d3-8803-4f24-9d61-0958d6b25c8f\") "
Dec 11 10:45:02 crc kubenswrapper[4663]: I1211 10:45:02.926673 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tnm25\" (UniqueName: \"kubernetes.io/projected/82e810d3-8803-4f24-9d61-0958d6b25c8f-kube-api-access-tnm25\") pod \"82e810d3-8803-4f24-9d61-0958d6b25c8f\" (UID: \"82e810d3-8803-4f24-9d61-0958d6b25c8f\") "
Dec 11 10:45:02 crc kubenswrapper[4663]: I1211 10:45:02.926697 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/82e810d3-8803-4f24-9d61-0958d6b25c8f-secret-volume\") pod \"82e810d3-8803-4f24-9d61-0958d6b25c8f\" (UID: \"82e810d3-8803-4f24-9d61-0958d6b25c8f\") "
Dec 11 10:45:02 crc kubenswrapper[4663]: I1211 10:45:02.927221 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/82e810d3-8803-4f24-9d61-0958d6b25c8f-config-volume" (OuterVolumeSpecName: "config-volume") pod "82e810d3-8803-4f24-9d61-0958d6b25c8f" (UID: "82e810d3-8803-4f24-9d61-0958d6b25c8f"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 11 10:45:02 crc kubenswrapper[4663]: I1211 10:45:02.930774 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/82e810d3-8803-4f24-9d61-0958d6b25c8f-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "82e810d3-8803-4f24-9d61-0958d6b25c8f" (UID: "82e810d3-8803-4f24-9d61-0958d6b25c8f"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 11 10:45:02 crc kubenswrapper[4663]: I1211 10:45:02.930875 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/82e810d3-8803-4f24-9d61-0958d6b25c8f-kube-api-access-tnm25" (OuterVolumeSpecName: "kube-api-access-tnm25") pod "82e810d3-8803-4f24-9d61-0958d6b25c8f" (UID: "82e810d3-8803-4f24-9d61-0958d6b25c8f"). InnerVolumeSpecName "kube-api-access-tnm25". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 11 10:45:03 crc kubenswrapper[4663]: I1211 10:45:03.028001 4663 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/82e810d3-8803-4f24-9d61-0958d6b25c8f-config-volume\") on node \"crc\" DevicePath \"\""
Dec 11 10:45:03 crc kubenswrapper[4663]: I1211 10:45:03.028029 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tnm25\" (UniqueName: \"kubernetes.io/projected/82e810d3-8803-4f24-9d61-0958d6b25c8f-kube-api-access-tnm25\") on node \"crc\" DevicePath \"\""
Dec 11 10:45:03 crc kubenswrapper[4663]: I1211 10:45:03.028041 4663 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/82e810d3-8803-4f24-9d61-0958d6b25c8f-secret-volume\") on node \"crc\" DevicePath \"\""
Dec 11 10:45:03 crc kubenswrapper[4663]: I1211 10:45:03.689001 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424165-f2jww" event={"ID":"82e810d3-8803-4f24-9d61-0958d6b25c8f","Type":"ContainerDied","Data":"6dcc27162c5fe14ba885b9de43751f3f487178a75cdbed9fbd752387f7bbbb34"}
Dec 11 10:45:03 crc kubenswrapper[4663]: I1211 10:45:03.689220 4663 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6dcc27162c5fe14ba885b9de43751f3f487178a75cdbed9fbd752387f7bbbb34"
Dec 11 10:45:03 crc kubenswrapper[4663]: I1211 10:45:03.689047 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424165-f2jww"
Dec 11 10:45:10 crc kubenswrapper[4663]: I1211 10:45:10.504572 4663 scope.go:117] "RemoveContainer" containerID="abeb3842eaaa5afa0cefde23a6fdc88c89d284dce3dd5516d78795f1625b7907"
Dec 11 10:45:10 crc kubenswrapper[4663]: E1211 10:45:10.505122 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mq524_openshift-machine-config-operator(98941020-2764-490d-81db-9decd21aaa13)\"" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13"
Dec 11 10:45:12 crc kubenswrapper[4663]: E1211 10:45:12.506242 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0\\\"\"" pod="openstack-operators/openstack-operator-index-s4rfx" podUID="4fa7b43e-65e7-4338-a694-8a7042d9fbcb"
Dec 11 10:45:25 crc kubenswrapper[4663]: I1211 10:45:25.506051 4663 scope.go:117] "RemoveContainer" containerID="abeb3842eaaa5afa0cefde23a6fdc88c89d284dce3dd5516d78795f1625b7907"
Dec 11 10:45:25 crc kubenswrapper[4663]: E1211 10:45:25.506717 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0\\\"\"" pod="openstack-operators/openstack-operator-index-s4rfx" podUID="4fa7b43e-65e7-4338-a694-8a7042d9fbcb"
Dec 11 10:45:25 crc kubenswrapper[4663]: E1211 10:45:25.507666 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mq524_openshift-machine-config-operator(98941020-2764-490d-81db-9decd21aaa13)\"" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13"
Dec 11 10:45:36 crc kubenswrapper[4663]: E1211 10:45:36.506007 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0\\\"\"" pod="openstack-operators/openstack-operator-index-s4rfx" podUID="4fa7b43e-65e7-4338-a694-8a7042d9fbcb"
Dec 11 10:45:37 crc kubenswrapper[4663]: I1211 10:45:37.504379 4663 scope.go:117] "RemoveContainer" containerID="abeb3842eaaa5afa0cefde23a6fdc88c89d284dce3dd5516d78795f1625b7907"
Dec 11 10:45:37 crc kubenswrapper[4663]: E1211 10:45:37.504991 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mq524_openshift-machine-config-operator(98941020-2764-490d-81db-9decd21aaa13)\"" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13"
Dec 11 10:45:50 crc kubenswrapper[4663]: I1211 10:45:50.504101 4663 scope.go:117] "RemoveContainer" containerID="abeb3842eaaa5afa0cefde23a6fdc88c89d284dce3dd5516d78795f1625b7907"
Dec 11 10:45:50 crc kubenswrapper[4663]: E1211 10:45:50.504632 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mq524_openshift-machine-config-operator(98941020-2764-490d-81db-9decd21aaa13)\"" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13"
Dec 11 10:45:50 crc kubenswrapper[4663]: E1211 10:45:50.505298 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0\\\"\"" pod="openstack-operators/openstack-operator-index-s4rfx" podUID="4fa7b43e-65e7-4338-a694-8a7042d9fbcb"
Dec 11 10:46:02 crc kubenswrapper[4663]: I1211 10:46:02.504892 4663 scope.go:117] "RemoveContainer" containerID="abeb3842eaaa5afa0cefde23a6fdc88c89d284dce3dd5516d78795f1625b7907"
Dec 11 10:46:02 crc kubenswrapper[4663]: E1211 10:46:02.505337 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mq524_openshift-machine-config-operator(98941020-2764-490d-81db-9decd21aaa13)\"" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13"
Dec 11 10:46:02 crc kubenswrapper[4663]: E1211 10:46:02.505921 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image 
\\\"38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0\\\"\"" pod="openstack-operators/openstack-operator-index-s4rfx" podUID="4fa7b43e-65e7-4338-a694-8a7042d9fbcb" Dec 11 10:46:15 crc kubenswrapper[4663]: I1211 10:46:15.505060 4663 scope.go:117] "RemoveContainer" containerID="abeb3842eaaa5afa0cefde23a6fdc88c89d284dce3dd5516d78795f1625b7907" Dec 11 10:46:15 crc kubenswrapper[4663]: E1211 10:46:15.505644 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mq524_openshift-machine-config-operator(98941020-2764-490d-81db-9decd21aaa13)\"" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13" Dec 11 10:46:16 crc kubenswrapper[4663]: E1211 10:46:16.506069 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0\\\"\"" pod="openstack-operators/openstack-operator-index-s4rfx" podUID="4fa7b43e-65e7-4338-a694-8a7042d9fbcb" Dec 11 10:46:27 crc kubenswrapper[4663]: I1211 10:46:27.504569 4663 scope.go:117] "RemoveContainer" containerID="abeb3842eaaa5afa0cefde23a6fdc88c89d284dce3dd5516d78795f1625b7907" Dec 11 10:46:27 crc kubenswrapper[4663]: E1211 10:46:27.505089 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mq524_openshift-machine-config-operator(98941020-2764-490d-81db-9decd21aaa13)\"" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13" Dec 11 10:46:27 crc kubenswrapper[4663]: E1211 10:46:27.505958 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0\\\"\"" pod="openstack-operators/openstack-operator-index-s4rfx" podUID="4fa7b43e-65e7-4338-a694-8a7042d9fbcb" Dec 11 10:46:38 crc kubenswrapper[4663]: E1211 10:46:38.506231 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0\\\"\"" pod="openstack-operators/openstack-operator-index-s4rfx" podUID="4fa7b43e-65e7-4338-a694-8a7042d9fbcb" Dec 11 10:46:39 crc kubenswrapper[4663]: I1211 10:46:39.507437 4663 scope.go:117] "RemoveContainer" containerID="abeb3842eaaa5afa0cefde23a6fdc88c89d284dce3dd5516d78795f1625b7907" Dec 11 10:46:39 crc kubenswrapper[4663]: E1211 10:46:39.507659 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mq524_openshift-machine-config-operator(98941020-2764-490d-81db-9decd21aaa13)\"" pod="openshift-machine-config-operator/machine-config-daemon-mq524" 
podUID="98941020-2764-490d-81db-9decd21aaa13" Dec 11 10:46:51 crc kubenswrapper[4663]: I1211 10:46:51.504367 4663 scope.go:117] "RemoveContainer" containerID="abeb3842eaaa5afa0cefde23a6fdc88c89d284dce3dd5516d78795f1625b7907" Dec 11 10:46:51 crc kubenswrapper[4663]: E1211 10:46:51.504858 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mq524_openshift-machine-config-operator(98941020-2764-490d-81db-9decd21aaa13)\"" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13" Dec 11 10:46:51 crc kubenswrapper[4663]: E1211 10:46:51.505847 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0\\\"\"" pod="openstack-operators/openstack-operator-index-s4rfx" podUID="4fa7b43e-65e7-4338-a694-8a7042d9fbcb" Dec 11 10:47:02 crc kubenswrapper[4663]: E1211 10:47:02.506330 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0\\\"\"" pod="openstack-operators/openstack-operator-index-s4rfx" podUID="4fa7b43e-65e7-4338-a694-8a7042d9fbcb" Dec 11 10:47:05 crc kubenswrapper[4663]: I1211 10:47:05.504648 4663 scope.go:117] "RemoveContainer" containerID="abeb3842eaaa5afa0cefde23a6fdc88c89d284dce3dd5516d78795f1625b7907" Dec 11 10:47:05 crc kubenswrapper[4663]: E1211 10:47:05.505011 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mq524_openshift-machine-config-operator(98941020-2764-490d-81db-9decd21aaa13)\"" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13" Dec 11 10:47:17 crc kubenswrapper[4663]: E1211 10:47:17.513314 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0\\\"\"" pod="openstack-operators/openstack-operator-index-s4rfx" podUID="4fa7b43e-65e7-4338-a694-8a7042d9fbcb" Dec 11 10:47:20 crc kubenswrapper[4663]: I1211 10:47:20.504712 4663 scope.go:117] "RemoveContainer" containerID="abeb3842eaaa5afa0cefde23a6fdc88c89d284dce3dd5516d78795f1625b7907" Dec 11 10:47:20 crc kubenswrapper[4663]: E1211 10:47:20.505676 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mq524_openshift-machine-config-operator(98941020-2764-490d-81db-9decd21aaa13)\"" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13" Dec 11 10:47:31 crc kubenswrapper[4663]: I1211 10:47:31.504281 4663 scope.go:117] "RemoveContainer" 
containerID="abeb3842eaaa5afa0cefde23a6fdc88c89d284dce3dd5516d78795f1625b7907" Dec 11 10:47:31 crc kubenswrapper[4663]: E1211 10:47:31.504838 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mq524_openshift-machine-config-operator(98941020-2764-490d-81db-9decd21aaa13)\"" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13" Dec 11 10:47:35 crc kubenswrapper[4663]: I1211 10:47:35.193284 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-d9fgd/must-gather-c2dvg"] Dec 11 10:47:35 crc kubenswrapper[4663]: E1211 10:47:35.194329 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82e810d3-8803-4f24-9d61-0958d6b25c8f" containerName="collect-profiles" Dec 11 10:47:35 crc kubenswrapper[4663]: I1211 10:47:35.194448 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="82e810d3-8803-4f24-9d61-0958d6b25c8f" containerName="collect-profiles" Dec 11 10:47:35 crc kubenswrapper[4663]: I1211 10:47:35.194639 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="82e810d3-8803-4f24-9d61-0958d6b25c8f" containerName="collect-profiles" Dec 11 10:47:35 crc kubenswrapper[4663]: I1211 10:47:35.195290 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-d9fgd/must-gather-c2dvg" Dec 11 10:47:35 crc kubenswrapper[4663]: I1211 10:47:35.196952 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-d9fgd"/"default-dockercfg-qz4wb" Dec 11 10:47:35 crc kubenswrapper[4663]: I1211 10:47:35.197194 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-d9fgd"/"kube-root-ca.crt" Dec 11 10:47:35 crc kubenswrapper[4663]: I1211 10:47:35.197234 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-d9fgd"/"openshift-service-ca.crt" Dec 11 10:47:35 crc kubenswrapper[4663]: I1211 10:47:35.210254 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-d9fgd/must-gather-c2dvg"] Dec 11 10:47:35 crc kubenswrapper[4663]: I1211 10:47:35.297176 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dhzlh\" (UniqueName: \"kubernetes.io/projected/e429baae-b346-40a5-9d72-5f724bb89914-kube-api-access-dhzlh\") pod \"must-gather-c2dvg\" (UID: \"e429baae-b346-40a5-9d72-5f724bb89914\") " pod="openshift-must-gather-d9fgd/must-gather-c2dvg" Dec 11 10:47:35 crc kubenswrapper[4663]: I1211 10:47:35.297282 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/e429baae-b346-40a5-9d72-5f724bb89914-must-gather-output\") pod \"must-gather-c2dvg\" (UID: \"e429baae-b346-40a5-9d72-5f724bb89914\") " pod="openshift-must-gather-d9fgd/must-gather-c2dvg" Dec 11 10:47:35 crc kubenswrapper[4663]: I1211 10:47:35.398146 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dhzlh\" (UniqueName: \"kubernetes.io/projected/e429baae-b346-40a5-9d72-5f724bb89914-kube-api-access-dhzlh\") pod \"must-gather-c2dvg\" (UID: \"e429baae-b346-40a5-9d72-5f724bb89914\") " pod="openshift-must-gather-d9fgd/must-gather-c2dvg" Dec 11 10:47:35 crc kubenswrapper[4663]: I1211 10:47:35.398452 
4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/e429baae-b346-40a5-9d72-5f724bb89914-must-gather-output\") pod \"must-gather-c2dvg\" (UID: \"e429baae-b346-40a5-9d72-5f724bb89914\") " pod="openshift-must-gather-d9fgd/must-gather-c2dvg" Dec 11 10:47:35 crc kubenswrapper[4663]: I1211 10:47:35.398760 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/e429baae-b346-40a5-9d72-5f724bb89914-must-gather-output\") pod \"must-gather-c2dvg\" (UID: \"e429baae-b346-40a5-9d72-5f724bb89914\") " pod="openshift-must-gather-d9fgd/must-gather-c2dvg" Dec 11 10:47:35 crc kubenswrapper[4663]: I1211 10:47:35.419653 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dhzlh\" (UniqueName: \"kubernetes.io/projected/e429baae-b346-40a5-9d72-5f724bb89914-kube-api-access-dhzlh\") pod \"must-gather-c2dvg\" (UID: \"e429baae-b346-40a5-9d72-5f724bb89914\") " pod="openshift-must-gather-d9fgd/must-gather-c2dvg" Dec 11 10:47:35 crc kubenswrapper[4663]: I1211 10:47:35.508200 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-d9fgd/must-gather-c2dvg" Dec 11 10:47:35 crc kubenswrapper[4663]: I1211 10:47:35.849332 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-d9fgd/must-gather-c2dvg"] Dec 11 10:47:36 crc kubenswrapper[4663]: I1211 10:47:36.345579 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-d9fgd/must-gather-c2dvg" event={"ID":"e429baae-b346-40a5-9d72-5f724bb89914","Type":"ContainerStarted","Data":"003f3c3df61272dd907b0207d3ad621f000b0c85fdee89eb77be8c7785b7a142"} Dec 11 10:47:41 crc kubenswrapper[4663]: I1211 10:47:41.375710 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-d9fgd/must-gather-c2dvg" event={"ID":"e429baae-b346-40a5-9d72-5f724bb89914","Type":"ContainerStarted","Data":"e0568e0b10248779cb98ad3d8dd750a266858917e5d411b074b1c7116de90252"} Dec 11 10:47:41 crc kubenswrapper[4663]: I1211 10:47:41.376088 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-d9fgd/must-gather-c2dvg" event={"ID":"e429baae-b346-40a5-9d72-5f724bb89914","Type":"ContainerStarted","Data":"7b3bb8057daa6f8599dc1b27c40c51d8bb853f46db91836e324d9042ee2b806d"} Dec 11 10:47:41 crc kubenswrapper[4663]: I1211 10:47:41.387623 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-d9fgd/must-gather-c2dvg" podStartSLOduration=1.769209453 podStartE2EDuration="6.387608891s" podCreationTimestamp="2025-12-11 10:47:35 +0000 UTC" firstStartedPulling="2025-12-11 10:47:35.853778246 +0000 UTC m=+1786.671708030" lastFinishedPulling="2025-12-11 10:47:40.472177684 +0000 UTC m=+1791.290107468" observedRunningTime="2025-12-11 10:47:41.385865708 +0000 UTC m=+1792.203795492" watchObservedRunningTime="2025-12-11 10:47:41.387608891 +0000 UTC m=+1792.205538675" Dec 11 10:47:45 crc kubenswrapper[4663]: I1211 10:47:45.505014 4663 scope.go:117] "RemoveContainer" containerID="abeb3842eaaa5afa0cefde23a6fdc88c89d284dce3dd5516d78795f1625b7907" Dec 11 10:47:45 crc kubenswrapper[4663]: E1211 10:47:45.505653 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-mq524_openshift-machine-config-operator(98941020-2764-490d-81db-9decd21aaa13)\"" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13" Dec 11 10:47:57 crc kubenswrapper[4663]: I1211 10:47:57.504042 4663 scope.go:117] "RemoveContainer" containerID="abeb3842eaaa5afa0cefde23a6fdc88c89d284dce3dd5516d78795f1625b7907" Dec 11 10:47:58 crc kubenswrapper[4663]: I1211 10:47:58.460845 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" event={"ID":"98941020-2764-490d-81db-9decd21aaa13","Type":"ContainerStarted","Data":"66b2e54d1041d60b055a6fae881d1566bf4ee973b483ad09b33e34ae87d7377a"} Dec 11 10:48:08 crc kubenswrapper[4663]: I1211 10:48:08.168432 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-fwjxr_32a7449e-b310-406a-b03a-4e2e461d11ae/control-plane-machine-set-operator/0.log" Dec 11 10:48:08 crc kubenswrapper[4663]: I1211 10:48:08.279407 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-g29lh_1001a5ac-b44d-41c2-979f-a987c9c6d63f/kube-rbac-proxy/0.log" Dec 11 10:48:08 crc kubenswrapper[4663]: I1211 10:48:08.279803 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-g29lh_1001a5ac-b44d-41c2-979f-a987c9c6d63f/machine-api-operator/0.log" Dec 11 10:48:16 crc kubenswrapper[4663]: I1211 10:48:16.327297 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-2fdfp_c4c3d255-ffd5-4eab-99b7-d62d9b664a16/cert-manager-controller/0.log" Dec 11 10:48:16 crc kubenswrapper[4663]: I1211 10:48:16.439431 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-zmshh_2f9a5b46-b79d-496d-9a78-132d85f4f115/cert-manager-cainjector/0.log" Dec 11 10:48:16 crc kubenswrapper[4663]: I1211 10:48:16.490265 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-m84qd_93fa4443-e685-41c3-9801-d2a37325674a/cert-manager-webhook/0.log" Dec 11 10:48:24 crc kubenswrapper[4663]: I1211 10:48:24.134755 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-6ff7998486-47k77_ca2f1c81-8dbc-4d92-95ef-c70df44a0801/nmstate-console-plugin/0.log" Dec 11 10:48:24 crc kubenswrapper[4663]: I1211 10:48:24.202510 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-7hlpp_7a691467-080b-4036-b702-43977278b399/nmstate-handler/0.log" Dec 11 10:48:24 crc kubenswrapper[4663]: I1211 10:48:24.255076 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f7f7578db-xpdhc_66a3bc5f-1b5b-432b-9ea2-b3ea5f9fc313/kube-rbac-proxy/0.log" Dec 11 10:48:24 crc kubenswrapper[4663]: I1211 10:48:24.265426 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f7f7578db-xpdhc_66a3bc5f-1b5b-432b-9ea2-b3ea5f9fc313/nmstate-metrics/0.log" Dec 11 10:48:24 crc kubenswrapper[4663]: I1211 10:48:24.443435 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-6769fb99d-rg6tv_bc3b05ee-7cfe-4661-91fb-280a23f47129/nmstate-operator/0.log" Dec 11 10:48:24 crc kubenswrapper[4663]: I1211 10:48:24.500715 4663 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-nmstate_nmstate-webhook-f8fb84555-jqmgx_bf360f09-bc5b-4440-8985-adc5db394494/nmstate-webhook/0.log" Dec 11 10:48:33 crc kubenswrapper[4663]: I1211 10:48:33.834749 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5bddd4b946-62rrw_505c55d2-998b-4df8-bd05-f129b5f8a806/kube-rbac-proxy/0.log" Dec 11 10:48:33 crc kubenswrapper[4663]: I1211 10:48:33.888579 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5bddd4b946-62rrw_505c55d2-998b-4df8-bd05-f129b5f8a806/controller/0.log" Dec 11 10:48:33 crc kubenswrapper[4663]: I1211 10:48:33.948774 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-cj69x_0169573a-ebd0-4a44-9181-5d156081599f/cp-frr-files/0.log" Dec 11 10:48:34 crc kubenswrapper[4663]: I1211 10:48:34.071350 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-cj69x_0169573a-ebd0-4a44-9181-5d156081599f/cp-frr-files/0.log" Dec 11 10:48:34 crc kubenswrapper[4663]: I1211 10:48:34.076254 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-cj69x_0169573a-ebd0-4a44-9181-5d156081599f/cp-reloader/0.log" Dec 11 10:48:34 crc kubenswrapper[4663]: I1211 10:48:34.100483 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-cj69x_0169573a-ebd0-4a44-9181-5d156081599f/cp-metrics/0.log" Dec 11 10:48:34 crc kubenswrapper[4663]: I1211 10:48:34.109413 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-cj69x_0169573a-ebd0-4a44-9181-5d156081599f/cp-reloader/0.log" Dec 11 10:48:34 crc kubenswrapper[4663]: I1211 10:48:34.229026 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-cj69x_0169573a-ebd0-4a44-9181-5d156081599f/cp-frr-files/0.log" Dec 11 10:48:34 crc kubenswrapper[4663]: I1211 10:48:34.262326 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-cj69x_0169573a-ebd0-4a44-9181-5d156081599f/cp-metrics/0.log" Dec 11 10:48:34 crc kubenswrapper[4663]: I1211 10:48:34.264028 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-cj69x_0169573a-ebd0-4a44-9181-5d156081599f/cp-metrics/0.log" Dec 11 10:48:34 crc kubenswrapper[4663]: I1211 10:48:34.271493 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-cj69x_0169573a-ebd0-4a44-9181-5d156081599f/cp-reloader/0.log" Dec 11 10:48:34 crc kubenswrapper[4663]: I1211 10:48:34.380097 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-cj69x_0169573a-ebd0-4a44-9181-5d156081599f/cp-frr-files/0.log" Dec 11 10:48:34 crc kubenswrapper[4663]: I1211 10:48:34.380242 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-cj69x_0169573a-ebd0-4a44-9181-5d156081599f/cp-reloader/0.log" Dec 11 10:48:34 crc kubenswrapper[4663]: I1211 10:48:34.391507 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-cj69x_0169573a-ebd0-4a44-9181-5d156081599f/controller/0.log" Dec 11 10:48:34 crc kubenswrapper[4663]: I1211 10:48:34.393313 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-cj69x_0169573a-ebd0-4a44-9181-5d156081599f/cp-metrics/0.log" Dec 11 10:48:34 crc kubenswrapper[4663]: I1211 10:48:34.518574 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-cj69x_0169573a-ebd0-4a44-9181-5d156081599f/frr-metrics/0.log" 
Dec 11 10:48:34 crc kubenswrapper[4663]: I1211 10:48:34.547495 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-cj69x_0169573a-ebd0-4a44-9181-5d156081599f/kube-rbac-proxy-frr/0.log" Dec 11 10:48:34 crc kubenswrapper[4663]: I1211 10:48:34.547688 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-cj69x_0169573a-ebd0-4a44-9181-5d156081599f/kube-rbac-proxy/0.log" Dec 11 10:48:34 crc kubenswrapper[4663]: I1211 10:48:34.580255 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-cj69x_0169573a-ebd0-4a44-9181-5d156081599f/frr/0.log" Dec 11 10:48:34 crc kubenswrapper[4663]: I1211 10:48:34.651585 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-cj69x_0169573a-ebd0-4a44-9181-5d156081599f/reloader/0.log" Dec 11 10:48:34 crc kubenswrapper[4663]: I1211 10:48:34.681380 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7784b6fcf-8bt7k_8f37f2bb-6cee-40a8-a2d2-366b8ded3853/frr-k8s-webhook-server/0.log" Dec 11 10:48:34 crc kubenswrapper[4663]: I1211 10:48:34.783709 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-f8979885c-gpvr8_46b9f037-3a95-414e-9089-95dd401c7b2d/manager/0.log" Dec 11 10:48:34 crc kubenswrapper[4663]: I1211 10:48:34.835061 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-6c9b679f4-7xwnn_322fecc6-f5c2-48c6-a014-91ca3c99b00f/webhook-server/0.log" Dec 11 10:48:34 crc kubenswrapper[4663]: I1211 10:48:34.911485 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-rvzrq_d6e35eff-30e8-40c3-a7bd-62f87d484142/kube-rbac-proxy/0.log" Dec 11 10:48:35 crc kubenswrapper[4663]: I1211 10:48:35.041200 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-rvzrq_d6e35eff-30e8-40c3-a7bd-62f87d484142/speaker/0.log" Dec 11 10:48:42 crc kubenswrapper[4663]: I1211 10:48:42.670309 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4pq4q8_862b7639-2f10-4cb5-be71-8e57bee9eb6b/util/0.log" Dec 11 10:48:42 crc kubenswrapper[4663]: I1211 10:48:42.800599 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4pq4q8_862b7639-2f10-4cb5-be71-8e57bee9eb6b/util/0.log" Dec 11 10:48:42 crc kubenswrapper[4663]: I1211 10:48:42.814731 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4pq4q8_862b7639-2f10-4cb5-be71-8e57bee9eb6b/pull/0.log" Dec 11 10:48:42 crc kubenswrapper[4663]: I1211 10:48:42.817677 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4pq4q8_862b7639-2f10-4cb5-be71-8e57bee9eb6b/pull/0.log" Dec 11 10:48:42 crc kubenswrapper[4663]: I1211 10:48:42.925510 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4pq4q8_862b7639-2f10-4cb5-be71-8e57bee9eb6b/util/0.log" Dec 11 10:48:42 crc kubenswrapper[4663]: I1211 10:48:42.925535 4663 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4pq4q8_862b7639-2f10-4cb5-be71-8e57bee9eb6b/pull/0.log" Dec 11 10:48:42 crc kubenswrapper[4663]: I1211 10:48:42.962094 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4pq4q8_862b7639-2f10-4cb5-be71-8e57bee9eb6b/extract/0.log" Dec 11 10:48:43 crc kubenswrapper[4663]: I1211 10:48:43.038598 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5_a5d1796b-fb33-4fc8-9f29-f3847c874051/util/0.log" Dec 11 10:48:43 crc kubenswrapper[4663]: I1211 10:48:43.146463 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5_a5d1796b-fb33-4fc8-9f29-f3847c874051/pull/0.log" Dec 11 10:48:43 crc kubenswrapper[4663]: I1211 10:48:43.161629 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5_a5d1796b-fb33-4fc8-9f29-f3847c874051/pull/0.log" Dec 11 10:48:43 crc kubenswrapper[4663]: I1211 10:48:43.166007 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5_a5d1796b-fb33-4fc8-9f29-f3847c874051/util/0.log" Dec 11 10:48:43 crc kubenswrapper[4663]: I1211 10:48:43.266459 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5_a5d1796b-fb33-4fc8-9f29-f3847c874051/util/0.log" Dec 11 10:48:43 crc kubenswrapper[4663]: I1211 10:48:43.275990 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5_a5d1796b-fb33-4fc8-9f29-f3847c874051/pull/0.log" Dec 11 10:48:43 crc kubenswrapper[4663]: I1211 10:48:43.278962 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210c5zf5_a5d1796b-fb33-4fc8-9f29-f3847c874051/extract/0.log" Dec 11 10:48:43 crc kubenswrapper[4663]: I1211 10:48:43.395914 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8vn4wk_0229fac6-4edc-4051-aab4-b2d74321e50e/util/0.log" Dec 11 10:48:43 crc kubenswrapper[4663]: I1211 10:48:43.494807 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8vn4wk_0229fac6-4edc-4051-aab4-b2d74321e50e/pull/0.log" Dec 11 10:48:43 crc kubenswrapper[4663]: I1211 10:48:43.498934 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8vn4wk_0229fac6-4edc-4051-aab4-b2d74321e50e/util/0.log" Dec 11 10:48:43 crc kubenswrapper[4663]: I1211 10:48:43.503747 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8vn4wk_0229fac6-4edc-4051-aab4-b2d74321e50e/pull/0.log" Dec 11 10:48:43 crc kubenswrapper[4663]: I1211 10:48:43.612626 4663 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8vn4wk_0229fac6-4edc-4051-aab4-b2d74321e50e/util/0.log" Dec 11 10:48:43 crc kubenswrapper[4663]: I1211 10:48:43.624814 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8vn4wk_0229fac6-4edc-4051-aab4-b2d74321e50e/pull/0.log" Dec 11 10:48:43 crc kubenswrapper[4663]: I1211 10:48:43.629845 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8vn4wk_0229fac6-4edc-4051-aab4-b2d74321e50e/extract/0.log" Dec 11 10:48:43 crc kubenswrapper[4663]: I1211 10:48:43.722727 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-z25sl_d9d5b748-8f69-4c3d-903a-5da6078bc36a/extract-utilities/0.log" Dec 11 10:48:43 crc kubenswrapper[4663]: I1211 10:48:43.841413 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-z25sl_d9d5b748-8f69-4c3d-903a-5da6078bc36a/extract-utilities/0.log" Dec 11 10:48:43 crc kubenswrapper[4663]: I1211 10:48:43.844309 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-z25sl_d9d5b748-8f69-4c3d-903a-5da6078bc36a/extract-content/0.log" Dec 11 10:48:43 crc kubenswrapper[4663]: I1211 10:48:43.849818 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-z25sl_d9d5b748-8f69-4c3d-903a-5da6078bc36a/extract-content/0.log" Dec 11 10:48:43 crc kubenswrapper[4663]: I1211 10:48:43.947278 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-z25sl_d9d5b748-8f69-4c3d-903a-5da6078bc36a/extract-content/0.log" Dec 11 10:48:43 crc kubenswrapper[4663]: I1211 10:48:43.951041 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-z25sl_d9d5b748-8f69-4c3d-903a-5da6078bc36a/extract-utilities/0.log" Dec 11 10:48:44 crc kubenswrapper[4663]: I1211 10:48:44.065545 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-qr54z_9fdfd84e-f9c5-4956-b80e-f5c2eae0c5eb/extract-utilities/0.log" Dec 11 10:48:44 crc kubenswrapper[4663]: I1211 10:48:44.192708 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-z25sl_d9d5b748-8f69-4c3d-903a-5da6078bc36a/registry-server/0.log" Dec 11 10:48:44 crc kubenswrapper[4663]: I1211 10:48:44.237754 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-qr54z_9fdfd84e-f9c5-4956-b80e-f5c2eae0c5eb/extract-content/0.log" Dec 11 10:48:44 crc kubenswrapper[4663]: I1211 10:48:44.241127 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-qr54z_9fdfd84e-f9c5-4956-b80e-f5c2eae0c5eb/extract-utilities/0.log" Dec 11 10:48:44 crc kubenswrapper[4663]: I1211 10:48:44.257507 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-qr54z_9fdfd84e-f9c5-4956-b80e-f5c2eae0c5eb/extract-content/0.log" Dec 11 10:48:44 crc kubenswrapper[4663]: I1211 10:48:44.357473 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-qr54z_9fdfd84e-f9c5-4956-b80e-f5c2eae0c5eb/extract-utilities/0.log" Dec 11 10:48:44 crc kubenswrapper[4663]: 
I1211 10:48:44.382730 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-qr54z_9fdfd84e-f9c5-4956-b80e-f5c2eae0c5eb/extract-content/0.log" Dec 11 10:48:44 crc kubenswrapper[4663]: I1211 10:48:44.505546 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-r4j8w_a6af4642-6ff0-46ed-9ece-b05780059f40/marketplace-operator/0.log" Dec 11 10:48:44 crc kubenswrapper[4663]: I1211 10:48:44.551356 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-qr54z_9fdfd84e-f9c5-4956-b80e-f5c2eae0c5eb/registry-server/0.log" Dec 11 10:48:44 crc kubenswrapper[4663]: I1211 10:48:44.596858 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-txl9v_af7e9963-0223-494a-9b8d-494f4c643019/extract-utilities/0.log" Dec 11 10:48:44 crc kubenswrapper[4663]: I1211 10:48:44.683312 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-txl9v_af7e9963-0223-494a-9b8d-494f4c643019/extract-content/0.log" Dec 11 10:48:44 crc kubenswrapper[4663]: I1211 10:48:44.696840 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-txl9v_af7e9963-0223-494a-9b8d-494f4c643019/extract-utilities/0.log" Dec 11 10:48:44 crc kubenswrapper[4663]: I1211 10:48:44.718701 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-txl9v_af7e9963-0223-494a-9b8d-494f4c643019/extract-content/0.log" Dec 11 10:48:44 crc kubenswrapper[4663]: I1211 10:48:44.837632 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-txl9v_af7e9963-0223-494a-9b8d-494f4c643019/extract-content/0.log" Dec 11 10:48:44 crc kubenswrapper[4663]: I1211 10:48:44.862530 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-txl9v_af7e9963-0223-494a-9b8d-494f4c643019/extract-utilities/0.log" Dec 11 10:48:44 crc kubenswrapper[4663]: I1211 10:48:44.907105 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-txl9v_af7e9963-0223-494a-9b8d-494f4c643019/registry-server/0.log" Dec 11 10:48:44 crc kubenswrapper[4663]: I1211 10:48:44.956609 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-g99nl_e7a9b9ae-8043-47cc-af42-b9b5c99c6bb9/extract-utilities/0.log" Dec 11 10:48:45 crc kubenswrapper[4663]: I1211 10:48:45.085129 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-g99nl_e7a9b9ae-8043-47cc-af42-b9b5c99c6bb9/extract-content/0.log" Dec 11 10:48:45 crc kubenswrapper[4663]: I1211 10:48:45.088638 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-g99nl_e7a9b9ae-8043-47cc-af42-b9b5c99c6bb9/extract-content/0.log" Dec 11 10:48:45 crc kubenswrapper[4663]: I1211 10:48:45.088703 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-g99nl_e7a9b9ae-8043-47cc-af42-b9b5c99c6bb9/extract-utilities/0.log" Dec 11 10:48:45 crc kubenswrapper[4663]: I1211 10:48:45.196681 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-g99nl_e7a9b9ae-8043-47cc-af42-b9b5c99c6bb9/extract-utilities/0.log" Dec 11 10:48:45 crc kubenswrapper[4663]: I1211 10:48:45.243331 
4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-g99nl_e7a9b9ae-8043-47cc-af42-b9b5c99c6bb9/extract-content/0.log" Dec 11 10:48:45 crc kubenswrapper[4663]: I1211 10:48:45.385561 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-g99nl_e7a9b9ae-8043-47cc-af42-b9b5c99c6bb9/registry-server/0.log" Dec 11 10:48:52 crc kubenswrapper[4663]: I1211 10:48:52.622679 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-668cf9dfbb-vp4bg_cbb769f1-696a-4afa-8ef4-a3d230a07eda/prometheus-operator/0.log" Dec 11 10:48:52 crc kubenswrapper[4663]: I1211 10:48:52.731672 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-7dfbd95878-6zjcl_a1318112-f771-48fb-8023-5e494d6dc382/prometheus-operator-admission-webhook/0.log" Dec 11 10:48:52 crc kubenswrapper[4663]: I1211 10:48:52.773311 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-7dfbd95878-92svc_4cafe8bf-4173-4b4a-b883-07b7450f9d92/prometheus-operator-admission-webhook/0.log" Dec 11 10:48:52 crc kubenswrapper[4663]: I1211 10:48:52.847527 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-d8bb48f5d-cm44b_6be7c78d-a792-4392-b915-507ca6867772/operator/0.log" Dec 11 10:48:52 crc kubenswrapper[4663]: I1211 10:48:52.893653 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5446b9c989-4vkfs_1c9993fc-c5ec-49fe-a68a-4da3fe9a8ac9/perses-operator/0.log" Dec 11 10:49:29 crc kubenswrapper[4663]: I1211 10:49:29.886982 4663 generic.go:334] "Generic (PLEG): container finished" podID="e429baae-b346-40a5-9d72-5f724bb89914" containerID="7b3bb8057daa6f8599dc1b27c40c51d8bb853f46db91836e324d9042ee2b806d" exitCode=0 Dec 11 10:49:29 crc kubenswrapper[4663]: I1211 10:49:29.887062 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-d9fgd/must-gather-c2dvg" event={"ID":"e429baae-b346-40a5-9d72-5f724bb89914","Type":"ContainerDied","Data":"7b3bb8057daa6f8599dc1b27c40c51d8bb853f46db91836e324d9042ee2b806d"} Dec 11 10:49:29 crc kubenswrapper[4663]: I1211 10:49:29.887878 4663 scope.go:117] "RemoveContainer" containerID="7b3bb8057daa6f8599dc1b27c40c51d8bb853f46db91836e324d9042ee2b806d" Dec 11 10:49:30 crc kubenswrapper[4663]: I1211 10:49:30.148740 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-d9fgd_must-gather-c2dvg_e429baae-b346-40a5-9d72-5f724bb89914/gather/0.log" Dec 11 10:49:30 crc kubenswrapper[4663]: E1211 10:49:30.510349 4663 log.go:32] "PullImage from image service failed" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0: pinging container registry 38.129.56.99:5001: Get \"http://38.129.56.99:5001/v2/\": dial tcp 38.129.56.99:5001: i/o timeout" image="38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0" Dec 11 10:49:30 crc kubenswrapper[4663]: E1211 10:49:30.510388 4663 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0: pinging container 
registry 38.129.56.99:5001: Get \"http://38.129.56.99:5001/v2/\": dial tcp 38.129.56.99:5001: i/o timeout" image="38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0" Dec 11 10:49:30 crc kubenswrapper[4663]: E1211 10:49:30.510480 4663 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:registry-server,Image:38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:grpc,HostPort:0,ContainerPort:50051,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-lrptx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:10,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-operator-index-s4rfx_openstack-operators(4fa7b43e-65e7-4338-a694-8a7042d9fbcb): ErrImagePull: rpc error: code = DeadlineExceeded desc = initializing source docker://38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0: pinging container registry 38.129.56.99:5001: Get \"http://38.129.56.99:5001/v2/\": dial tcp 38.129.56.99:5001: i/o timeout" logger="UnhandledError" Dec 11 10:49:30 crc kubenswrapper[4663]: E1211 10:49:30.511868 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ErrImagePull: \"rpc error: code = DeadlineExceeded desc = initializing source docker://38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0: pinging container registry 38.129.56.99:5001: Get \\\"http://38.129.56.99:5001/v2/\\\": dial tcp 38.129.56.99:5001: i/o timeout\"" pod="openstack-operators/openstack-operator-index-s4rfx" podUID="4fa7b43e-65e7-4338-a694-8a7042d9fbcb" Dec 11 10:49:36 crc kubenswrapper[4663]: I1211 
10:49:36.636405 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-d9fgd/must-gather-c2dvg"] Dec 11 10:49:36 crc kubenswrapper[4663]: I1211 10:49:36.636893 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-d9fgd/must-gather-c2dvg" podUID="e429baae-b346-40a5-9d72-5f724bb89914" containerName="copy" containerID="cri-o://e0568e0b10248779cb98ad3d8dd750a266858917e5d411b074b1c7116de90252" gracePeriod=2 Dec 11 10:49:36 crc kubenswrapper[4663]: I1211 10:49:36.639817 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-d9fgd/must-gather-c2dvg"] Dec 11 10:49:36 crc kubenswrapper[4663]: I1211 10:49:36.920733 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-d9fgd_must-gather-c2dvg_e429baae-b346-40a5-9d72-5f724bb89914/copy/0.log" Dec 11 10:49:36 crc kubenswrapper[4663]: I1211 10:49:36.921386 4663 generic.go:334] "Generic (PLEG): container finished" podID="e429baae-b346-40a5-9d72-5f724bb89914" containerID="e0568e0b10248779cb98ad3d8dd750a266858917e5d411b074b1c7116de90252" exitCode=143 Dec 11 10:49:36 crc kubenswrapper[4663]: I1211 10:49:36.921429 4663 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="003f3c3df61272dd907b0207d3ad621f000b0c85fdee89eb77be8c7785b7a142" Dec 11 10:49:36 crc kubenswrapper[4663]: I1211 10:49:36.938359 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-d9fgd_must-gather-c2dvg_e429baae-b346-40a5-9d72-5f724bb89914/copy/0.log" Dec 11 10:49:36 crc kubenswrapper[4663]: I1211 10:49:36.938725 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-d9fgd/must-gather-c2dvg" Dec 11 10:49:37 crc kubenswrapper[4663]: I1211 10:49:37.013147 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dhzlh\" (UniqueName: \"kubernetes.io/projected/e429baae-b346-40a5-9d72-5f724bb89914-kube-api-access-dhzlh\") pod \"e429baae-b346-40a5-9d72-5f724bb89914\" (UID: \"e429baae-b346-40a5-9d72-5f724bb89914\") " Dec 11 10:49:37 crc kubenswrapper[4663]: I1211 10:49:37.013259 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/e429baae-b346-40a5-9d72-5f724bb89914-must-gather-output\") pod \"e429baae-b346-40a5-9d72-5f724bb89914\" (UID: \"e429baae-b346-40a5-9d72-5f724bb89914\") " Dec 11 10:49:37 crc kubenswrapper[4663]: I1211 10:49:37.018019 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e429baae-b346-40a5-9d72-5f724bb89914-kube-api-access-dhzlh" (OuterVolumeSpecName: "kube-api-access-dhzlh") pod "e429baae-b346-40a5-9d72-5f724bb89914" (UID: "e429baae-b346-40a5-9d72-5f724bb89914"). InnerVolumeSpecName "kube-api-access-dhzlh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:49:37 crc kubenswrapper[4663]: I1211 10:49:37.054898 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e429baae-b346-40a5-9d72-5f724bb89914-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "e429baae-b346-40a5-9d72-5f724bb89914" (UID: "e429baae-b346-40a5-9d72-5f724bb89914"). InnerVolumeSpecName "must-gather-output". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:49:37 crc kubenswrapper[4663]: I1211 10:49:37.114366 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dhzlh\" (UniqueName: \"kubernetes.io/projected/e429baae-b346-40a5-9d72-5f724bb89914-kube-api-access-dhzlh\") on node \"crc\" DevicePath \"\"" Dec 11 10:49:37 crc kubenswrapper[4663]: I1211 10:49:37.114390 4663 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/e429baae-b346-40a5-9d72-5f724bb89914-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 11 10:49:37 crc kubenswrapper[4663]: I1211 10:49:37.510132 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e429baae-b346-40a5-9d72-5f724bb89914" path="/var/lib/kubelet/pods/e429baae-b346-40a5-9d72-5f724bb89914/volumes" Dec 11 10:49:37 crc kubenswrapper[4663]: I1211 10:49:37.925478 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-d9fgd/must-gather-c2dvg" Dec 11 10:49:45 crc kubenswrapper[4663]: E1211 10:49:45.506214 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0\\\"\"" pod="openstack-operators/openstack-operator-index-s4rfx" podUID="4fa7b43e-65e7-4338-a694-8a7042d9fbcb" Dec 11 10:49:59 crc kubenswrapper[4663]: E1211 10:49:59.507770 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0\\\"\"" pod="openstack-operators/openstack-operator-index-s4rfx" podUID="4fa7b43e-65e7-4338-a694-8a7042d9fbcb" Dec 11 10:50:12 crc kubenswrapper[4663]: E1211 10:50:12.506415 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0\\\"\"" pod="openstack-operators/openstack-operator-index-s4rfx" podUID="4fa7b43e-65e7-4338-a694-8a7042d9fbcb" Dec 11 10:50:19 crc kubenswrapper[4663]: I1211 10:50:19.271271 4663 patch_prober.go:28] interesting pod/machine-config-daemon-mq524 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 10:50:19 crc kubenswrapper[4663]: I1211 10:50:19.271626 4663 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 10:50:25 crc kubenswrapper[4663]: E1211 10:50:25.505671 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0\\\"\"" pod="openstack-operators/openstack-operator-index-s4rfx" 
podUID="4fa7b43e-65e7-4338-a694-8a7042d9fbcb" Dec 11 10:50:37 crc kubenswrapper[4663]: E1211 10:50:37.506171 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0\\\"\"" pod="openstack-operators/openstack-operator-index-s4rfx" podUID="4fa7b43e-65e7-4338-a694-8a7042d9fbcb" Dec 11 10:50:49 crc kubenswrapper[4663]: I1211 10:50:49.271122 4663 patch_prober.go:28] interesting pod/machine-config-daemon-mq524 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 10:50:49 crc kubenswrapper[4663]: I1211 10:50:49.271534 4663 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 10:50:49 crc kubenswrapper[4663]: E1211 10:50:49.508420 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0\\\"\"" pod="openstack-operators/openstack-operator-index-s4rfx" podUID="4fa7b43e-65e7-4338-a694-8a7042d9fbcb" Dec 11 10:51:03 crc kubenswrapper[4663]: E1211 10:51:03.506105 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0\\\"\"" pod="openstack-operators/openstack-operator-index-s4rfx" podUID="4fa7b43e-65e7-4338-a694-8a7042d9fbcb" Dec 11 10:51:12 crc kubenswrapper[4663]: I1211 10:51:12.788704 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-fwvr2"] Dec 11 10:51:12 crc kubenswrapper[4663]: E1211 10:51:12.789353 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e429baae-b346-40a5-9d72-5f724bb89914" containerName="copy" Dec 11 10:51:12 crc kubenswrapper[4663]: I1211 10:51:12.789366 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="e429baae-b346-40a5-9d72-5f724bb89914" containerName="copy" Dec 11 10:51:12 crc kubenswrapper[4663]: E1211 10:51:12.789380 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e429baae-b346-40a5-9d72-5f724bb89914" containerName="gather" Dec 11 10:51:12 crc kubenswrapper[4663]: I1211 10:51:12.789387 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="e429baae-b346-40a5-9d72-5f724bb89914" containerName="gather" Dec 11 10:51:12 crc kubenswrapper[4663]: I1211 10:51:12.789498 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="e429baae-b346-40a5-9d72-5f724bb89914" containerName="copy" Dec 11 10:51:12 crc kubenswrapper[4663]: I1211 10:51:12.789512 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="e429baae-b346-40a5-9d72-5f724bb89914" containerName="gather" Dec 11 10:51:12 crc kubenswrapper[4663]: I1211 10:51:12.790310 4663 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fwvr2" Dec 11 10:51:12 crc kubenswrapper[4663]: I1211 10:51:12.798774 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fwvr2"] Dec 11 10:51:12 crc kubenswrapper[4663]: I1211 10:51:12.845378 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ttck5\" (UniqueName: \"kubernetes.io/projected/285336ea-6213-4343-87e1-17b65e3d8c34-kube-api-access-ttck5\") pod \"redhat-marketplace-fwvr2\" (UID: \"285336ea-6213-4343-87e1-17b65e3d8c34\") " pod="openshift-marketplace/redhat-marketplace-fwvr2" Dec 11 10:51:12 crc kubenswrapper[4663]: I1211 10:51:12.845661 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/285336ea-6213-4343-87e1-17b65e3d8c34-catalog-content\") pod \"redhat-marketplace-fwvr2\" (UID: \"285336ea-6213-4343-87e1-17b65e3d8c34\") " pod="openshift-marketplace/redhat-marketplace-fwvr2" Dec 11 10:51:12 crc kubenswrapper[4663]: I1211 10:51:12.845718 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/285336ea-6213-4343-87e1-17b65e3d8c34-utilities\") pod \"redhat-marketplace-fwvr2\" (UID: \"285336ea-6213-4343-87e1-17b65e3d8c34\") " pod="openshift-marketplace/redhat-marketplace-fwvr2" Dec 11 10:51:12 crc kubenswrapper[4663]: I1211 10:51:12.946285 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/285336ea-6213-4343-87e1-17b65e3d8c34-utilities\") pod \"redhat-marketplace-fwvr2\" (UID: \"285336ea-6213-4343-87e1-17b65e3d8c34\") " pod="openshift-marketplace/redhat-marketplace-fwvr2" Dec 11 10:51:12 crc kubenswrapper[4663]: I1211 10:51:12.946364 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ttck5\" (UniqueName: \"kubernetes.io/projected/285336ea-6213-4343-87e1-17b65e3d8c34-kube-api-access-ttck5\") pod \"redhat-marketplace-fwvr2\" (UID: \"285336ea-6213-4343-87e1-17b65e3d8c34\") " pod="openshift-marketplace/redhat-marketplace-fwvr2" Dec 11 10:51:12 crc kubenswrapper[4663]: I1211 10:51:12.946424 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/285336ea-6213-4343-87e1-17b65e3d8c34-catalog-content\") pod \"redhat-marketplace-fwvr2\" (UID: \"285336ea-6213-4343-87e1-17b65e3d8c34\") " pod="openshift-marketplace/redhat-marketplace-fwvr2" Dec 11 10:51:12 crc kubenswrapper[4663]: I1211 10:51:12.946760 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/285336ea-6213-4343-87e1-17b65e3d8c34-utilities\") pod \"redhat-marketplace-fwvr2\" (UID: \"285336ea-6213-4343-87e1-17b65e3d8c34\") " pod="openshift-marketplace/redhat-marketplace-fwvr2" Dec 11 10:51:12 crc kubenswrapper[4663]: I1211 10:51:12.946787 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/285336ea-6213-4343-87e1-17b65e3d8c34-catalog-content\") pod \"redhat-marketplace-fwvr2\" (UID: \"285336ea-6213-4343-87e1-17b65e3d8c34\") " pod="openshift-marketplace/redhat-marketplace-fwvr2" Dec 11 10:51:12 crc kubenswrapper[4663]: I1211 10:51:12.961743 4663 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-ttck5\" (UniqueName: \"kubernetes.io/projected/285336ea-6213-4343-87e1-17b65e3d8c34-kube-api-access-ttck5\") pod \"redhat-marketplace-fwvr2\" (UID: \"285336ea-6213-4343-87e1-17b65e3d8c34\") " pod="openshift-marketplace/redhat-marketplace-fwvr2" Dec 11 10:51:12 crc kubenswrapper[4663]: I1211 10:51:12.985102 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-fw4hp"] Dec 11 10:51:12 crc kubenswrapper[4663]: I1211 10:51:12.986099 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fw4hp" Dec 11 10:51:12 crc kubenswrapper[4663]: I1211 10:51:12.991425 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fw4hp"] Dec 11 10:51:13 crc kubenswrapper[4663]: I1211 10:51:13.047781 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-69pmk\" (UniqueName: \"kubernetes.io/projected/8e8d2b9f-9857-424c-bfc3-99b42ec067b7-kube-api-access-69pmk\") pod \"redhat-operators-fw4hp\" (UID: \"8e8d2b9f-9857-424c-bfc3-99b42ec067b7\") " pod="openshift-marketplace/redhat-operators-fw4hp" Dec 11 10:51:13 crc kubenswrapper[4663]: I1211 10:51:13.047829 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8e8d2b9f-9857-424c-bfc3-99b42ec067b7-utilities\") pod \"redhat-operators-fw4hp\" (UID: \"8e8d2b9f-9857-424c-bfc3-99b42ec067b7\") " pod="openshift-marketplace/redhat-operators-fw4hp" Dec 11 10:51:13 crc kubenswrapper[4663]: I1211 10:51:13.047907 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8e8d2b9f-9857-424c-bfc3-99b42ec067b7-catalog-content\") pod \"redhat-operators-fw4hp\" (UID: \"8e8d2b9f-9857-424c-bfc3-99b42ec067b7\") " pod="openshift-marketplace/redhat-operators-fw4hp" Dec 11 10:51:13 crc kubenswrapper[4663]: I1211 10:51:13.102907 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fwvr2" Dec 11 10:51:13 crc kubenswrapper[4663]: I1211 10:51:13.148992 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-69pmk\" (UniqueName: \"kubernetes.io/projected/8e8d2b9f-9857-424c-bfc3-99b42ec067b7-kube-api-access-69pmk\") pod \"redhat-operators-fw4hp\" (UID: \"8e8d2b9f-9857-424c-bfc3-99b42ec067b7\") " pod="openshift-marketplace/redhat-operators-fw4hp" Dec 11 10:51:13 crc kubenswrapper[4663]: I1211 10:51:13.149041 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8e8d2b9f-9857-424c-bfc3-99b42ec067b7-utilities\") pod \"redhat-operators-fw4hp\" (UID: \"8e8d2b9f-9857-424c-bfc3-99b42ec067b7\") " pod="openshift-marketplace/redhat-operators-fw4hp" Dec 11 10:51:13 crc kubenswrapper[4663]: I1211 10:51:13.149095 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8e8d2b9f-9857-424c-bfc3-99b42ec067b7-catalog-content\") pod \"redhat-operators-fw4hp\" (UID: \"8e8d2b9f-9857-424c-bfc3-99b42ec067b7\") " pod="openshift-marketplace/redhat-operators-fw4hp" Dec 11 10:51:13 crc kubenswrapper[4663]: I1211 10:51:13.149697 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8e8d2b9f-9857-424c-bfc3-99b42ec067b7-catalog-content\") pod \"redhat-operators-fw4hp\" (UID: \"8e8d2b9f-9857-424c-bfc3-99b42ec067b7\") " pod="openshift-marketplace/redhat-operators-fw4hp" Dec 11 10:51:13 crc kubenswrapper[4663]: I1211 10:51:13.149741 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8e8d2b9f-9857-424c-bfc3-99b42ec067b7-utilities\") pod \"redhat-operators-fw4hp\" (UID: \"8e8d2b9f-9857-424c-bfc3-99b42ec067b7\") " pod="openshift-marketplace/redhat-operators-fw4hp" Dec 11 10:51:13 crc kubenswrapper[4663]: I1211 10:51:13.163363 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-69pmk\" (UniqueName: \"kubernetes.io/projected/8e8d2b9f-9857-424c-bfc3-99b42ec067b7-kube-api-access-69pmk\") pod \"redhat-operators-fw4hp\" (UID: \"8e8d2b9f-9857-424c-bfc3-99b42ec067b7\") " pod="openshift-marketplace/redhat-operators-fw4hp" Dec 11 10:51:13 crc kubenswrapper[4663]: I1211 10:51:13.247571 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fwvr2"] Dec 11 10:51:13 crc kubenswrapper[4663]: W1211 10:51:13.260307 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod285336ea_6213_4343_87e1_17b65e3d8c34.slice/crio-e8828db3bcc8a6c24d822cee1c2bcdb8eefe4e70e0c8a4919965273d67e76fe3 WatchSource:0}: Error finding container e8828db3bcc8a6c24d822cee1c2bcdb8eefe4e70e0c8a4919965273d67e76fe3: Status 404 returned error can't find the container with id e8828db3bcc8a6c24d822cee1c2bcdb8eefe4e70e0c8a4919965273d67e76fe3 Dec 11 10:51:13 crc kubenswrapper[4663]: I1211 10:51:13.298699 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-fw4hp" Dec 11 10:51:13 crc kubenswrapper[4663]: I1211 10:51:13.337609 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fwvr2" event={"ID":"285336ea-6213-4343-87e1-17b65e3d8c34","Type":"ContainerStarted","Data":"e8828db3bcc8a6c24d822cee1c2bcdb8eefe4e70e0c8a4919965273d67e76fe3"} Dec 11 10:51:13 crc kubenswrapper[4663]: I1211 10:51:13.650474 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fw4hp"] Dec 11 10:51:13 crc kubenswrapper[4663]: W1211 10:51:13.655255 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8e8d2b9f_9857_424c_bfc3_99b42ec067b7.slice/crio-66316c37206dca3af6713d0f00deb968d37122c53cb79d3b77d140ba9b63e183 WatchSource:0}: Error finding container 66316c37206dca3af6713d0f00deb968d37122c53cb79d3b77d140ba9b63e183: Status 404 returned error can't find the container with id 66316c37206dca3af6713d0f00deb968d37122c53cb79d3b77d140ba9b63e183 Dec 11 10:51:14 crc kubenswrapper[4663]: I1211 10:51:14.343706 4663 generic.go:334] "Generic (PLEG): container finished" podID="8e8d2b9f-9857-424c-bfc3-99b42ec067b7" containerID="7b0302bbd9a341fc400b8fac02c8b4d5cdcb4619e99a9f74deb5152da8d3c93c" exitCode=0 Dec 11 10:51:14 crc kubenswrapper[4663]: I1211 10:51:14.343747 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fw4hp" event={"ID":"8e8d2b9f-9857-424c-bfc3-99b42ec067b7","Type":"ContainerDied","Data":"7b0302bbd9a341fc400b8fac02c8b4d5cdcb4619e99a9f74deb5152da8d3c93c"} Dec 11 10:51:14 crc kubenswrapper[4663]: I1211 10:51:14.343935 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fw4hp" event={"ID":"8e8d2b9f-9857-424c-bfc3-99b42ec067b7","Type":"ContainerStarted","Data":"66316c37206dca3af6713d0f00deb968d37122c53cb79d3b77d140ba9b63e183"} Dec 11 10:51:14 crc kubenswrapper[4663]: I1211 10:51:14.345288 4663 generic.go:334] "Generic (PLEG): container finished" podID="285336ea-6213-4343-87e1-17b65e3d8c34" containerID="1a1b141ab0ac4ce20b50bbc2a67c37d9b1faca008576cc600e743c1c7d23c827" exitCode=0 Dec 11 10:51:14 crc kubenswrapper[4663]: I1211 10:51:14.345342 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fwvr2" event={"ID":"285336ea-6213-4343-87e1-17b65e3d8c34","Type":"ContainerDied","Data":"1a1b141ab0ac4ce20b50bbc2a67c37d9b1faca008576cc600e743c1c7d23c827"} Dec 11 10:51:14 crc kubenswrapper[4663]: I1211 10:51:14.345384 4663 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 11 10:51:15 crc kubenswrapper[4663]: I1211 10:51:15.350606 4663 generic.go:334] "Generic (PLEG): container finished" podID="285336ea-6213-4343-87e1-17b65e3d8c34" containerID="07d8f4cff119cf99ad4dc6a4f7d0cff51a6240d2599b75524e77cfd97d1688e4" exitCode=0 Dec 11 10:51:15 crc kubenswrapper[4663]: I1211 10:51:15.350699 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fwvr2" event={"ID":"285336ea-6213-4343-87e1-17b65e3d8c34","Type":"ContainerDied","Data":"07d8f4cff119cf99ad4dc6a4f7d0cff51a6240d2599b75524e77cfd97d1688e4"} Dec 11 10:51:15 crc kubenswrapper[4663]: I1211 10:51:15.353102 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fw4hp" 
event={"ID":"8e8d2b9f-9857-424c-bfc3-99b42ec067b7","Type":"ContainerStarted","Data":"2f83a0a1eab6c0ea9181af9264123f711ea6433a5a088cac357631ec99af2134"} Dec 11 10:51:16 crc kubenswrapper[4663]: I1211 10:51:16.359931 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fwvr2" event={"ID":"285336ea-6213-4343-87e1-17b65e3d8c34","Type":"ContainerStarted","Data":"a7abdc10f6165b3f25d7cbfc479b0c2f2c2b01c03cc54ef162db404a32e8cad1"} Dec 11 10:51:16 crc kubenswrapper[4663]: I1211 10:51:16.361865 4663 generic.go:334] "Generic (PLEG): container finished" podID="8e8d2b9f-9857-424c-bfc3-99b42ec067b7" containerID="2f83a0a1eab6c0ea9181af9264123f711ea6433a5a088cac357631ec99af2134" exitCode=0 Dec 11 10:51:16 crc kubenswrapper[4663]: I1211 10:51:16.361900 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fw4hp" event={"ID":"8e8d2b9f-9857-424c-bfc3-99b42ec067b7","Type":"ContainerDied","Data":"2f83a0a1eab6c0ea9181af9264123f711ea6433a5a088cac357631ec99af2134"} Dec 11 10:51:16 crc kubenswrapper[4663]: I1211 10:51:16.373560 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-fwvr2" podStartSLOduration=2.828563885 podStartE2EDuration="4.373544818s" podCreationTimestamp="2025-12-11 10:51:12 +0000 UTC" firstStartedPulling="2025-12-11 10:51:14.346310001 +0000 UTC m=+2005.164239785" lastFinishedPulling="2025-12-11 10:51:15.891290934 +0000 UTC m=+2006.709220718" observedRunningTime="2025-12-11 10:51:16.371468524 +0000 UTC m=+2007.189398309" watchObservedRunningTime="2025-12-11 10:51:16.373544818 +0000 UTC m=+2007.191474602" Dec 11 10:51:17 crc kubenswrapper[4663]: I1211 10:51:17.367865 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fw4hp" event={"ID":"8e8d2b9f-9857-424c-bfc3-99b42ec067b7","Type":"ContainerStarted","Data":"8c7a823e5aaf5a2f7bb1f2b37d5e4d117ddf8151b24d99a5450649d458690066"} Dec 11 10:51:17 crc kubenswrapper[4663]: I1211 10:51:17.380943 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-fw4hp" podStartSLOduration=2.85087876 podStartE2EDuration="5.380927667s" podCreationTimestamp="2025-12-11 10:51:12 +0000 UTC" firstStartedPulling="2025-12-11 10:51:14.345111267 +0000 UTC m=+2005.163041052" lastFinishedPulling="2025-12-11 10:51:16.875160176 +0000 UTC m=+2007.693089959" observedRunningTime="2025-12-11 10:51:17.380857835 +0000 UTC m=+2008.198787619" watchObservedRunningTime="2025-12-11 10:51:17.380927667 +0000 UTC m=+2008.198857450" Dec 11 10:51:17 crc kubenswrapper[4663]: E1211 10:51:17.505524 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0\\\"\"" pod="openstack-operators/openstack-operator-index-s4rfx" podUID="4fa7b43e-65e7-4338-a694-8a7042d9fbcb" Dec 11 10:51:19 crc kubenswrapper[4663]: I1211 10:51:19.271221 4663 patch_prober.go:28] interesting pod/machine-config-daemon-mq524 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 10:51:19 crc kubenswrapper[4663]: I1211 10:51:19.271454 4663 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 10:51:19 crc kubenswrapper[4663]: I1211 10:51:19.271492 4663 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mq524" Dec 11 10:51:19 crc kubenswrapper[4663]: I1211 10:51:19.271878 4663 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"66b2e54d1041d60b055a6fae881d1566bf4ee973b483ad09b33e34ae87d7377a"} pod="openshift-machine-config-operator/machine-config-daemon-mq524" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 11 10:51:19 crc kubenswrapper[4663]: I1211 10:51:19.271926 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mq524" podUID="98941020-2764-490d-81db-9decd21aaa13" containerName="machine-config-daemon" containerID="cri-o://66b2e54d1041d60b055a6fae881d1566bf4ee973b483ad09b33e34ae87d7377a" gracePeriod=600 Dec 11 10:51:20 crc kubenswrapper[4663]: I1211 10:51:20.384663 4663 generic.go:334] "Generic (PLEG): container finished" podID="98941020-2764-490d-81db-9decd21aaa13" containerID="66b2e54d1041d60b055a6fae881d1566bf4ee973b483ad09b33e34ae87d7377a" exitCode=0 Dec 11 10:51:20 crc kubenswrapper[4663]: I1211 10:51:20.384729 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" event={"ID":"98941020-2764-490d-81db-9decd21aaa13","Type":"ContainerDied","Data":"66b2e54d1041d60b055a6fae881d1566bf4ee973b483ad09b33e34ae87d7377a"} Dec 11 10:51:20 crc kubenswrapper[4663]: I1211 10:51:20.384882 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mq524" event={"ID":"98941020-2764-490d-81db-9decd21aaa13","Type":"ContainerStarted","Data":"919ef440d433da94aa713ed127f778551926e3a31e93c3d742bb3374cd2d1468"} Dec 11 10:51:20 crc kubenswrapper[4663]: I1211 10:51:20.384903 4663 scope.go:117] "RemoveContainer" containerID="abeb3842eaaa5afa0cefde23a6fdc88c89d284dce3dd5516d78795f1625b7907" Dec 11 10:51:23 crc kubenswrapper[4663]: I1211 10:51:23.103811 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-fwvr2" Dec 11 10:51:23 crc kubenswrapper[4663]: I1211 10:51:23.104032 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-fwvr2" Dec 11 10:51:23 crc kubenswrapper[4663]: I1211 10:51:23.129582 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-fwvr2" Dec 11 10:51:23 crc kubenswrapper[4663]: I1211 10:51:23.298769 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-fw4hp" Dec 11 10:51:23 crc kubenswrapper[4663]: I1211 10:51:23.298805 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-fw4hp" Dec 11 10:51:23 crc kubenswrapper[4663]: I1211 10:51:23.326721 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-fw4hp" Dec 11 
10:51:23 crc kubenswrapper[4663]: I1211 10:51:23.425975 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-fwvr2" Dec 11 10:51:23 crc kubenswrapper[4663]: I1211 10:51:23.426217 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-fw4hp" Dec 11 10:51:25 crc kubenswrapper[4663]: I1211 10:51:25.151120 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fwvr2"] Dec 11 10:51:25 crc kubenswrapper[4663]: I1211 10:51:25.408040 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-fwvr2" podUID="285336ea-6213-4343-87e1-17b65e3d8c34" containerName="registry-server" containerID="cri-o://a7abdc10f6165b3f25d7cbfc479b0c2f2c2b01c03cc54ef162db404a32e8cad1" gracePeriod=2 Dec 11 10:51:25 crc kubenswrapper[4663]: I1211 10:51:25.694321 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fwvr2" Dec 11 10:51:25 crc kubenswrapper[4663]: I1211 10:51:25.751790 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fw4hp"] Dec 11 10:51:25 crc kubenswrapper[4663]: I1211 10:51:25.751985 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-fw4hp" podUID="8e8d2b9f-9857-424c-bfc3-99b42ec067b7" containerName="registry-server" containerID="cri-o://8c7a823e5aaf5a2f7bb1f2b37d5e4d117ddf8151b24d99a5450649d458690066" gracePeriod=2 Dec 11 10:51:25 crc kubenswrapper[4663]: I1211 10:51:25.778836 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/285336ea-6213-4343-87e1-17b65e3d8c34-utilities\") pod \"285336ea-6213-4343-87e1-17b65e3d8c34\" (UID: \"285336ea-6213-4343-87e1-17b65e3d8c34\") " Dec 11 10:51:25 crc kubenswrapper[4663]: I1211 10:51:25.778908 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/285336ea-6213-4343-87e1-17b65e3d8c34-catalog-content\") pod \"285336ea-6213-4343-87e1-17b65e3d8c34\" (UID: \"285336ea-6213-4343-87e1-17b65e3d8c34\") " Dec 11 10:51:25 crc kubenswrapper[4663]: I1211 10:51:25.778931 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ttck5\" (UniqueName: \"kubernetes.io/projected/285336ea-6213-4343-87e1-17b65e3d8c34-kube-api-access-ttck5\") pod \"285336ea-6213-4343-87e1-17b65e3d8c34\" (UID: \"285336ea-6213-4343-87e1-17b65e3d8c34\") " Dec 11 10:51:25 crc kubenswrapper[4663]: I1211 10:51:25.779551 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/285336ea-6213-4343-87e1-17b65e3d8c34-utilities" (OuterVolumeSpecName: "utilities") pod "285336ea-6213-4343-87e1-17b65e3d8c34" (UID: "285336ea-6213-4343-87e1-17b65e3d8c34"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:51:25 crc kubenswrapper[4663]: I1211 10:51:25.783016 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/285336ea-6213-4343-87e1-17b65e3d8c34-kube-api-access-ttck5" (OuterVolumeSpecName: "kube-api-access-ttck5") pod "285336ea-6213-4343-87e1-17b65e3d8c34" (UID: "285336ea-6213-4343-87e1-17b65e3d8c34"). InnerVolumeSpecName "kube-api-access-ttck5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:51:25 crc kubenswrapper[4663]: I1211 10:51:25.861958 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/285336ea-6213-4343-87e1-17b65e3d8c34-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "285336ea-6213-4343-87e1-17b65e3d8c34" (UID: "285336ea-6213-4343-87e1-17b65e3d8c34"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:51:25 crc kubenswrapper[4663]: I1211 10:51:25.880001 4663 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/285336ea-6213-4343-87e1-17b65e3d8c34-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 10:51:25 crc kubenswrapper[4663]: I1211 10:51:25.880028 4663 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/285336ea-6213-4343-87e1-17b65e3d8c34-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 10:51:25 crc kubenswrapper[4663]: I1211 10:51:25.880039 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ttck5\" (UniqueName: \"kubernetes.io/projected/285336ea-6213-4343-87e1-17b65e3d8c34-kube-api-access-ttck5\") on node \"crc\" DevicePath \"\"" Dec 11 10:51:26 crc kubenswrapper[4663]: I1211 10:51:26.417033 4663 generic.go:334] "Generic (PLEG): container finished" podID="285336ea-6213-4343-87e1-17b65e3d8c34" containerID="a7abdc10f6165b3f25d7cbfc479b0c2f2c2b01c03cc54ef162db404a32e8cad1" exitCode=0 Dec 11 10:51:26 crc kubenswrapper[4663]: I1211 10:51:26.417085 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fwvr2" Dec 11 10:51:26 crc kubenswrapper[4663]: I1211 10:51:26.417097 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fwvr2" event={"ID":"285336ea-6213-4343-87e1-17b65e3d8c34","Type":"ContainerDied","Data":"a7abdc10f6165b3f25d7cbfc479b0c2f2c2b01c03cc54ef162db404a32e8cad1"} Dec 11 10:51:26 crc kubenswrapper[4663]: I1211 10:51:26.417380 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fwvr2" event={"ID":"285336ea-6213-4343-87e1-17b65e3d8c34","Type":"ContainerDied","Data":"e8828db3bcc8a6c24d822cee1c2bcdb8eefe4e70e0c8a4919965273d67e76fe3"} Dec 11 10:51:26 crc kubenswrapper[4663]: I1211 10:51:26.417397 4663 scope.go:117] "RemoveContainer" containerID="a7abdc10f6165b3f25d7cbfc479b0c2f2c2b01c03cc54ef162db404a32e8cad1" Dec 11 10:51:26 crc kubenswrapper[4663]: I1211 10:51:26.419362 4663 generic.go:334] "Generic (PLEG): container finished" podID="8e8d2b9f-9857-424c-bfc3-99b42ec067b7" containerID="8c7a823e5aaf5a2f7bb1f2b37d5e4d117ddf8151b24d99a5450649d458690066" exitCode=0 Dec 11 10:51:26 crc kubenswrapper[4663]: I1211 10:51:26.419396 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fw4hp" event={"ID":"8e8d2b9f-9857-424c-bfc3-99b42ec067b7","Type":"ContainerDied","Data":"8c7a823e5aaf5a2f7bb1f2b37d5e4d117ddf8151b24d99a5450649d458690066"} Dec 11 10:51:26 crc kubenswrapper[4663]: I1211 10:51:26.435609 4663 scope.go:117] "RemoveContainer" containerID="07d8f4cff119cf99ad4dc6a4f7d0cff51a6240d2599b75524e77cfd97d1688e4" Dec 11 10:51:26 crc kubenswrapper[4663]: I1211 10:51:26.439989 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fwvr2"] Dec 11 10:51:26 crc kubenswrapper[4663]: 
I1211 10:51:26.443549 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-fwvr2"] Dec 11 10:51:26 crc kubenswrapper[4663]: I1211 10:51:26.460856 4663 scope.go:117] "RemoveContainer" containerID="1a1b141ab0ac4ce20b50bbc2a67c37d9b1faca008576cc600e743c1c7d23c827" Dec 11 10:51:26 crc kubenswrapper[4663]: I1211 10:51:26.471761 4663 scope.go:117] "RemoveContainer" containerID="a7abdc10f6165b3f25d7cbfc479b0c2f2c2b01c03cc54ef162db404a32e8cad1" Dec 11 10:51:26 crc kubenswrapper[4663]: E1211 10:51:26.472112 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a7abdc10f6165b3f25d7cbfc479b0c2f2c2b01c03cc54ef162db404a32e8cad1\": container with ID starting with a7abdc10f6165b3f25d7cbfc479b0c2f2c2b01c03cc54ef162db404a32e8cad1 not found: ID does not exist" containerID="a7abdc10f6165b3f25d7cbfc479b0c2f2c2b01c03cc54ef162db404a32e8cad1" Dec 11 10:51:26 crc kubenswrapper[4663]: I1211 10:51:26.472144 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a7abdc10f6165b3f25d7cbfc479b0c2f2c2b01c03cc54ef162db404a32e8cad1"} err="failed to get container status \"a7abdc10f6165b3f25d7cbfc479b0c2f2c2b01c03cc54ef162db404a32e8cad1\": rpc error: code = NotFound desc = could not find container \"a7abdc10f6165b3f25d7cbfc479b0c2f2c2b01c03cc54ef162db404a32e8cad1\": container with ID starting with a7abdc10f6165b3f25d7cbfc479b0c2f2c2b01c03cc54ef162db404a32e8cad1 not found: ID does not exist" Dec 11 10:51:26 crc kubenswrapper[4663]: I1211 10:51:26.472165 4663 scope.go:117] "RemoveContainer" containerID="07d8f4cff119cf99ad4dc6a4f7d0cff51a6240d2599b75524e77cfd97d1688e4" Dec 11 10:51:26 crc kubenswrapper[4663]: E1211 10:51:26.472568 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"07d8f4cff119cf99ad4dc6a4f7d0cff51a6240d2599b75524e77cfd97d1688e4\": container with ID starting with 07d8f4cff119cf99ad4dc6a4f7d0cff51a6240d2599b75524e77cfd97d1688e4 not found: ID does not exist" containerID="07d8f4cff119cf99ad4dc6a4f7d0cff51a6240d2599b75524e77cfd97d1688e4" Dec 11 10:51:26 crc kubenswrapper[4663]: I1211 10:51:26.472600 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07d8f4cff119cf99ad4dc6a4f7d0cff51a6240d2599b75524e77cfd97d1688e4"} err="failed to get container status \"07d8f4cff119cf99ad4dc6a4f7d0cff51a6240d2599b75524e77cfd97d1688e4\": rpc error: code = NotFound desc = could not find container \"07d8f4cff119cf99ad4dc6a4f7d0cff51a6240d2599b75524e77cfd97d1688e4\": container with ID starting with 07d8f4cff119cf99ad4dc6a4f7d0cff51a6240d2599b75524e77cfd97d1688e4 not found: ID does not exist" Dec 11 10:51:26 crc kubenswrapper[4663]: I1211 10:51:26.472619 4663 scope.go:117] "RemoveContainer" containerID="1a1b141ab0ac4ce20b50bbc2a67c37d9b1faca008576cc600e743c1c7d23c827" Dec 11 10:51:26 crc kubenswrapper[4663]: E1211 10:51:26.472897 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1a1b141ab0ac4ce20b50bbc2a67c37d9b1faca008576cc600e743c1c7d23c827\": container with ID starting with 1a1b141ab0ac4ce20b50bbc2a67c37d9b1faca008576cc600e743c1c7d23c827 not found: ID does not exist" containerID="1a1b141ab0ac4ce20b50bbc2a67c37d9b1faca008576cc600e743c1c7d23c827" Dec 11 10:51:26 crc kubenswrapper[4663]: I1211 10:51:26.472917 4663 pod_container_deletor.go:53] "DeleteContainer returned 
error" containerID={"Type":"cri-o","ID":"1a1b141ab0ac4ce20b50bbc2a67c37d9b1faca008576cc600e743c1c7d23c827"} err="failed to get container status \"1a1b141ab0ac4ce20b50bbc2a67c37d9b1faca008576cc600e743c1c7d23c827\": rpc error: code = NotFound desc = could not find container \"1a1b141ab0ac4ce20b50bbc2a67c37d9b1faca008576cc600e743c1c7d23c827\": container with ID starting with 1a1b141ab0ac4ce20b50bbc2a67c37d9b1faca008576cc600e743c1c7d23c827 not found: ID does not exist" Dec 11 10:51:26 crc kubenswrapper[4663]: I1211 10:51:26.550788 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fw4hp" Dec 11 10:51:26 crc kubenswrapper[4663]: I1211 10:51:26.688658 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8e8d2b9f-9857-424c-bfc3-99b42ec067b7-utilities\") pod \"8e8d2b9f-9857-424c-bfc3-99b42ec067b7\" (UID: \"8e8d2b9f-9857-424c-bfc3-99b42ec067b7\") " Dec 11 10:51:26 crc kubenswrapper[4663]: I1211 10:51:26.688758 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-69pmk\" (UniqueName: \"kubernetes.io/projected/8e8d2b9f-9857-424c-bfc3-99b42ec067b7-kube-api-access-69pmk\") pod \"8e8d2b9f-9857-424c-bfc3-99b42ec067b7\" (UID: \"8e8d2b9f-9857-424c-bfc3-99b42ec067b7\") " Dec 11 10:51:26 crc kubenswrapper[4663]: I1211 10:51:26.688787 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8e8d2b9f-9857-424c-bfc3-99b42ec067b7-catalog-content\") pod \"8e8d2b9f-9857-424c-bfc3-99b42ec067b7\" (UID: \"8e8d2b9f-9857-424c-bfc3-99b42ec067b7\") " Dec 11 10:51:26 crc kubenswrapper[4663]: I1211 10:51:26.689347 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8e8d2b9f-9857-424c-bfc3-99b42ec067b7-utilities" (OuterVolumeSpecName: "utilities") pod "8e8d2b9f-9857-424c-bfc3-99b42ec067b7" (UID: "8e8d2b9f-9857-424c-bfc3-99b42ec067b7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:51:26 crc kubenswrapper[4663]: I1211 10:51:26.692048 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8e8d2b9f-9857-424c-bfc3-99b42ec067b7-kube-api-access-69pmk" (OuterVolumeSpecName: "kube-api-access-69pmk") pod "8e8d2b9f-9857-424c-bfc3-99b42ec067b7" (UID: "8e8d2b9f-9857-424c-bfc3-99b42ec067b7"). InnerVolumeSpecName "kube-api-access-69pmk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:51:26 crc kubenswrapper[4663]: I1211 10:51:26.780174 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8e8d2b9f-9857-424c-bfc3-99b42ec067b7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8e8d2b9f-9857-424c-bfc3-99b42ec067b7" (UID: "8e8d2b9f-9857-424c-bfc3-99b42ec067b7"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:51:26 crc kubenswrapper[4663]: I1211 10:51:26.789893 4663 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8e8d2b9f-9857-424c-bfc3-99b42ec067b7-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 10:51:26 crc kubenswrapper[4663]: I1211 10:51:26.789920 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-69pmk\" (UniqueName: \"kubernetes.io/projected/8e8d2b9f-9857-424c-bfc3-99b42ec067b7-kube-api-access-69pmk\") on node \"crc\" DevicePath \"\"" Dec 11 10:51:26 crc kubenswrapper[4663]: I1211 10:51:26.789930 4663 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8e8d2b9f-9857-424c-bfc3-99b42ec067b7-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 10:51:27 crc kubenswrapper[4663]: I1211 10:51:27.427243 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fw4hp" event={"ID":"8e8d2b9f-9857-424c-bfc3-99b42ec067b7","Type":"ContainerDied","Data":"66316c37206dca3af6713d0f00deb968d37122c53cb79d3b77d140ba9b63e183"} Dec 11 10:51:27 crc kubenswrapper[4663]: I1211 10:51:27.427287 4663 scope.go:117] "RemoveContainer" containerID="8c7a823e5aaf5a2f7bb1f2b37d5e4d117ddf8151b24d99a5450649d458690066" Dec 11 10:51:27 crc kubenswrapper[4663]: I1211 10:51:27.427384 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fw4hp" Dec 11 10:51:27 crc kubenswrapper[4663]: I1211 10:51:27.442836 4663 scope.go:117] "RemoveContainer" containerID="2f83a0a1eab6c0ea9181af9264123f711ea6433a5a088cac357631ec99af2134" Dec 11 10:51:27 crc kubenswrapper[4663]: I1211 10:51:27.451975 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fw4hp"] Dec 11 10:51:27 crc kubenswrapper[4663]: I1211 10:51:27.454657 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-fw4hp"] Dec 11 10:51:27 crc kubenswrapper[4663]: I1211 10:51:27.485229 4663 scope.go:117] "RemoveContainer" containerID="7b0302bbd9a341fc400b8fac02c8b4d5cdcb4619e99a9f74deb5152da8d3c93c" Dec 11 10:51:27 crc kubenswrapper[4663]: I1211 10:51:27.509892 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="285336ea-6213-4343-87e1-17b65e3d8c34" path="/var/lib/kubelet/pods/285336ea-6213-4343-87e1-17b65e3d8c34/volumes" Dec 11 10:51:27 crc kubenswrapper[4663]: I1211 10:51:27.510455 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8e8d2b9f-9857-424c-bfc3-99b42ec067b7" path="/var/lib/kubelet/pods/8e8d2b9f-9857-424c-bfc3-99b42ec067b7/volumes" Dec 11 10:51:30 crc kubenswrapper[4663]: E1211 10:51:30.506010 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0\\\"\"" pod="openstack-operators/openstack-operator-index-s4rfx" podUID="4fa7b43e-65e7-4338-a694-8a7042d9fbcb" Dec 11 10:51:45 crc kubenswrapper[4663]: E1211 10:51:45.505789 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0\\\"\"" 
pod="openstack-operators/openstack-operator-index-s4rfx" podUID="4fa7b43e-65e7-4338-a694-8a7042d9fbcb" Dec 11 10:52:00 crc kubenswrapper[4663]: E1211 10:52:00.505590 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0\\\"\"" pod="openstack-operators/openstack-operator-index-s4rfx" podUID="4fa7b43e-65e7-4338-a694-8a7042d9fbcb" Dec 11 10:52:14 crc kubenswrapper[4663]: E1211 10:52:14.506772 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0\\\"\"" pod="openstack-operators/openstack-operator-index-s4rfx" podUID="4fa7b43e-65e7-4338-a694-8a7042d9fbcb" Dec 11 10:52:25 crc kubenswrapper[4663]: E1211 10:52:25.506102 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0\\\"\"" pod="openstack-operators/openstack-operator-index-s4rfx" podUID="4fa7b43e-65e7-4338-a694-8a7042d9fbcb" Dec 11 10:52:38 crc kubenswrapper[4663]: E1211 10:52:38.505945 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0\\\"\"" pod="openstack-operators/openstack-operator-index-s4rfx" podUID="4fa7b43e-65e7-4338-a694-8a7042d9fbcb" Dec 11 10:52:52 crc kubenswrapper[4663]: E1211 10:52:52.506711 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0\\\"\"" pod="openstack-operators/openstack-operator-index-s4rfx" podUID="4fa7b43e-65e7-4338-a694-8a7042d9fbcb" Dec 11 10:53:05 crc kubenswrapper[4663]: E1211 10:53:05.506239 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.129.56.99:5001/openstack-k8s-operators/openstack-operator-index:3ed83d885437822e616142d4e07e8ac35013d1a0\\\"\"" pod="openstack-operators/openstack-operator-index-s4rfx" podUID="4fa7b43e-65e7-4338-a694-8a7042d9fbcb" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515116521230024441 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015116521231017357 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015116514645016515 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015116514645015465 5ustar corecore